Class org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSinkUnitTest

47 tests, 0 failures, 0 ignored, 42.191s duration, 100% successful

Tests

Test Duration Result
readsSinkHDFSTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) 2.768s passed
readsSinkHDFSTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) 1.927s passed
readsSinkHDFSTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) 2.471s passed
readsSinkHDFSTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) 1.714s passed
readsSinkHDFSTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) 1.137s passed
readsSinkHDFSTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) 2.115s passed
readsSinkHDFSTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) 2.068s passed
readsSinkHDFSTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) 1.405s passed
readsSinkHDFSTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) 0.467s passed
readsSinkShardedTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) 0.400s passed
readsSinkShardedTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) 0.396s passed
readsSinkShardedTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) 0.366s passed
readsSinkShardedTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) 0.356s passed
readsSinkShardedTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) 0.339s passed
readsSinkShardedTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) 0.346s passed
readsSinkShardedTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) 0.346s passed
readsSinkShardedTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) 0.303s passed
readsSinkShardedTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) 0.334s passed
readsSinkTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) 0.442s passed
readsSinkTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) 0.408s passed
readsSinkTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) 0.506s passed
readsSinkTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) 0.401s passed
readsSinkTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) 0.483s passed
readsSinkTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) 0.419s passed
readsSinkTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) 0.395s passed
readsSinkTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) 0.460s passed
readsSinkTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) 0.357s passed
testReadsSparkSinkNotSortingReadsToHeader 0.288s passed
testSpecifyPartsDir[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) 0.423s passed
testSpecifyPartsDir[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) 0.405s passed
testSpecifyPartsDir[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) 0.501s passed
testSpecifyPartsDir[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) 0.387s passed
testSpecifyPartsDir[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) 0.500s passed
testSpecifyPartsDir[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) 0.419s passed
testSpecifyPartsDir[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) 0.379s passed
testSpecifyPartsDir[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) 0.421s passed
testSpecifyPartsDir[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) 0.352s passed
testWritingToAnExistingFileHDFS[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) 1.674s passed
testWritingToAnExistingFileHDFS[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) 2.052s passed
testWritingToAnExistingFileHDFS[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) 2.251s passed
testWritingToAnExistingFileHDFS[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) 1.612s passed
testWritingToAnExistingFileHDFS[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) 1.789s passed
testWritingToAnExistingFileHDFS[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) 0.824s passed
testWritingToAnExistingFileHDFS[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) 1.646s passed
testWritingToAnExistingFileHDFS[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) 1.600s passed
testWritingToAnExistingFileHDFS[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) 1.148s passed
testWritingToFileURL 0.391s passed

Standard error

20:33:10.840 INFO  MiniDFSCluster - starting cluster: numNameNodes=1, numDataNodes=1
20:33:11.067 INFO  NameNode - Formatting using clusterid: testClusterID
20:33:11.077 INFO  FSEditLog - Edit logging is async:true
20:33:11.092 INFO  FSNamesystem - KeyProvider: null
20:33:11.093 INFO  FSNamesystem - fsLock is fair: true
20:33:11.093 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
20:33:11.094 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
20:33:11.094 INFO  FSNamesystem - supergroup             = supergroup
20:33:11.094 INFO  FSNamesystem - isPermissionEnabled    = true
20:33:11.094 INFO  FSNamesystem - isStoragePolicyEnabled = true
20:33:11.094 INFO  FSNamesystem - HA Enabled: false
20:33:11.123 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
20:33:11.127 INFO  deprecation - hadoop.configured.node.mapping is deprecated. Instead, use net.topology.configured.node.mapping
20:33:11.127 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
20:33:11.127 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
20:33:11.129 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
20:33:11.129 INFO  BlockManager - The block deletion will start around 2025 Jul 15 20:33:11
20:33:11.130 INFO  GSet - Computing capacity for map BlocksMap
20:33:11.130 INFO  GSet - VM type       = 64-bit
20:33:11.131 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
20:33:11.131 INFO  GSet - capacity      = 2^23 = 8388608 entries
20:33:11.138 INFO  BlockManager - Storage policy satisfier is disabled
20:33:11.138 INFO  BlockManager - dfs.block.access.token.enable = false
20:33:11.142 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
20:33:11.143 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
20:33:11.143 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
20:33:11.143 INFO  BlockManager - defaultReplication         = 1
20:33:11.143 INFO  BlockManager - maxReplication             = 512
20:33:11.143 INFO  BlockManager - minReplication             = 1
20:33:11.143 INFO  BlockManager - maxReplicationStreams      = 2
20:33:11.143 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
20:33:11.143 INFO  BlockManager - encryptDataTransfer        = false
20:33:11.143 INFO  BlockManager - maxNumBlocksToLog          = 1000
20:33:11.160 INFO  FSDirectory - GLOBAL serial map: bits=29 maxEntries=536870911
20:33:11.160 INFO  FSDirectory - USER serial map: bits=24 maxEntries=16777215
20:33:11.160 INFO  FSDirectory - GROUP serial map: bits=24 maxEntries=16777215
20:33:11.160 INFO  FSDirectory - XATTR serial map: bits=24 maxEntries=16777215
20:33:11.167 INFO  GSet - Computing capacity for map INodeMap
20:33:11.167 INFO  GSet - VM type       = 64-bit
20:33:11.167 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
20:33:11.167 INFO  GSet - capacity      = 2^22 = 4194304 entries
20:33:11.168 INFO  FSDirectory - ACLs enabled? true
20:33:11.168 INFO  FSDirectory - POSIX ACL inheritance enabled? true
20:33:11.168 INFO  FSDirectory - XAttrs enabled? true
20:33:11.168 INFO  NameNode - Caching file names occurring more than 10 times
20:33:11.171 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotLimit: 65536
20:33:11.172 INFO  SnapshotManager - SkipList is disabled
20:33:11.175 INFO  GSet - Computing capacity for map cachedBlocks
20:33:11.175 INFO  GSet - VM type       = 64-bit
20:33:11.175 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
20:33:11.175 INFO  GSet - capacity      = 2^20 = 1048576 entries
20:33:11.179 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
20:33:11.179 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
20:33:11.179 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
20:33:11.181 INFO  FSNamesystem - Retry cache on namenode is enabled
20:33:11.181 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
20:33:11.182 INFO  GSet - Computing capacity for map NameNodeRetryCache
20:33:11.182 INFO  GSet - VM type       = 64-bit
20:33:11.182 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
20:33:11.182 INFO  GSet - capacity      = 2^17 = 131072 entries
20:33:11.192 INFO  FSImage - Allocated new BlockPoolId: BP-169878775-10.1.0.111-1752611591188
20:33:11.196 INFO  Storage - Storage directory /tmp/minicluster_storage11240959748026123074/name-0-1 has been successfully formatted.
20:33:11.198 INFO  Storage - Storage directory /tmp/minicluster_storage11240959748026123074/name-0-2 has been successfully formatted.
20:33:11.216 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage11240959748026123074/name-0-2/current/fsimage.ckpt_0000000000000000000 using no compression
20:33:11.216 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage11240959748026123074/name-0-1/current/fsimage.ckpt_0000000000000000000 using no compression
20:33:11.289 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage11240959748026123074/name-0-1/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
20:33:11.289 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage11240959748026123074/name-0-2/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
20:33:11.300 INFO  NNStorageRetentionManager - Going to retain 1 images with txid >= 0
20:33:11.357 INFO  FSNamesystem - Stopping services started for active state
20:33:11.358 INFO  FSNamesystem - Stopping services started for standby state
20:33:11.359 INFO  NameNode - createNameNode []
20:33:11.392 WARN  MetricsConfig - Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
20:33:11.399 INFO  MetricsSystemImpl - Scheduled Metric snapshot period at 10 second(s).
20:33:11.399 INFO  MetricsSystemImpl - NameNode metrics system started
20:33:11.403 INFO  NameNodeUtils - fs.defaultFS is hdfs://127.0.0.1:0
20:33:11.425 INFO  JvmPauseMonitor - Starting JVM pause monitor
20:33:11.436 INFO  DFSUtil - Filter initializers set : org.apache.hadoop.http.lib.StaticUserWebFilter,org.apache.hadoop.hdfs.web.AuthFilterInitializer
20:33:11.440 INFO  DFSUtil - Starting Web-server for hdfs at: http://localhost:0
20:33:11.449 INFO  log - Logging initialized @27198ms to org.eclipse.jetty.util.log.Slf4jLog
20:33:11.514 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
20:33:11.517 WARN  HttpRequestLog - Jetty request log can only be enabled using Log4j
20:33:11.521 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
20:33:11.522 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs
20:33:11.522 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
20:33:11.524 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context hdfs
20:33:11.524 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context static
20:33:11.550 INFO  HttpServer2 - addJerseyResourcePackage: packageName=org.apache.hadoop.hdfs.server.namenode.web.resources;org.apache.hadoop.hdfs.web.resources, pathSpec=/webhdfs/v1/*
20:33:11.554 INFO  HttpServer2 - Jetty bound to port 46017
20:33:11.555 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
20:33:11.577 INFO  session - DefaultSessionIdManager workerName=node0
20:33:11.577 INFO  session - No SessionScavenger set, using defaults
20:33:11.578 INFO  session - node0 Scavenging every 600000ms
20:33:11.589 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
20:33:11.590 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@5a27a096{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.3.6/5058b645375c6a68f509e167ad6a6ada9642df09/hadoop-hdfs-3.3.6-tests.jar!/webapps/static,AVAILABLE}
20:33:11.724 INFO  ContextHandler - Started o.e.j.w.WebAppContext@7a00446{hdfs,/,file:///tmp/jetty-localhost-46017-hadoop-hdfs-3_3_6-tests_jar-_-any-18083912115602526887/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.3.6/5058b645375c6a68f509e167ad6a6ada9642df09/hadoop-hdfs-3.3.6-tests.jar!/webapps/hdfs}
20:33:11.728 INFO  AbstractConnector - Started ServerConnector@508a1ef0{HTTP/1.1, (http/1.1)}{localhost:46017}
20:33:11.728 INFO  Server - Started @27478ms
20:33:11.732 INFO  FSEditLog - Edit logging is async:true
20:33:11.741 INFO  FSNamesystem - KeyProvider: null
20:33:11.741 INFO  FSNamesystem - fsLock is fair: true
20:33:11.742 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
20:33:11.742 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
20:33:11.742 INFO  FSNamesystem - supergroup             = supergroup
20:33:11.742 INFO  FSNamesystem - isPermissionEnabled    = true
20:33:11.742 INFO  FSNamesystem - isStoragePolicyEnabled = true
20:33:11.742 INFO  FSNamesystem - HA Enabled: false
20:33:11.742 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
20:33:11.742 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
20:33:11.742 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
20:33:11.743 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
20:33:11.743 INFO  BlockManager - The block deletion will start around 2025 Jul 15 20:33:11
20:33:11.743 INFO  GSet - Computing capacity for map BlocksMap
20:33:11.743 INFO  GSet - VM type       = 64-bit
20:33:11.743 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
20:33:11.743 INFO  GSet - capacity      = 2^23 = 8388608 entries
20:33:11.745 INFO  BlockManager - Storage policy satisfier is disabled
20:33:11.745 INFO  BlockManager - dfs.block.access.token.enable = false
20:33:11.745 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
20:33:11.745 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
20:33:11.745 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
20:33:11.745 INFO  BlockManager - defaultReplication         = 1
20:33:11.745 INFO  BlockManager - maxReplication             = 512
20:33:11.745 INFO  BlockManager - minReplication             = 1
20:33:11.745 INFO  BlockManager - maxReplicationStreams      = 2
20:33:11.745 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
20:33:11.745 INFO  BlockManager - encryptDataTransfer        = false
20:33:11.745 INFO  BlockManager - maxNumBlocksToLog          = 1000
20:33:11.746 INFO  GSet - Computing capacity for map INodeMap
20:33:11.746 INFO  GSet - VM type       = 64-bit
20:33:11.746 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
20:33:11.746 INFO  GSet - capacity      = 2^22 = 4194304 entries
20:33:11.757 INFO  FSDirectory - ACLs enabled? true
20:33:11.757 INFO  FSDirectory - POSIX ACL inheritance enabled? true
20:33:11.757 INFO  FSDirectory - XAttrs enabled? true
20:33:11.757 INFO  NameNode - Caching file names occurring more than 10 times
20:33:11.757 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotLimit: 65536
20:33:11.757 INFO  SnapshotManager - SkipList is disabled
20:33:11.757 INFO  GSet - Computing capacity for map cachedBlocks
20:33:11.757 INFO  GSet - VM type       = 64-bit
20:33:11.757 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
20:33:11.757 INFO  GSet - capacity      = 2^20 = 1048576 entries
20:33:11.757 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
20:33:11.757 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
20:33:11.757 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
20:33:11.757 INFO  FSNamesystem - Retry cache on namenode is enabled
20:33:11.757 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
20:33:11.757 INFO  GSet - Computing capacity for map NameNodeRetryCache
20:33:11.757 INFO  GSet - VM type       = 64-bit
20:33:11.757 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
20:33:11.757 INFO  GSet - capacity      = 2^17 = 131072 entries
20:33:11.759 INFO  BlockManagerInfo - Removed broadcast_32_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1920.0 MiB)
20:33:11.761 INFO  BlockManagerInfo - Removed broadcast_31_piece0 on localhost:45281 in memory (size: 320.0 B, free: 1920.0 MiB)
20:33:11.761 INFO  Storage - Lock on /tmp/minicluster_storage11240959748026123074/name-0-1/in_use.lock acquired by nodename 3077@pkrvmq0rgcvqdmg
20:33:11.763 INFO  Storage - Lock on /tmp/minicluster_storage11240959748026123074/name-0-2/in_use.lock acquired by nodename 3077@pkrvmq0rgcvqdmg
20:33:11.764 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage11240959748026123074/name-0-1/current
20:33:11.764 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage11240959748026123074/name-0-2/current
20:33:11.764 INFO  FSImage - No edit log streams selected.
20:33:11.764 INFO  FSImage - Planning to load image: FSImageFile(file=/tmp/minicluster_storage11240959748026123074/name-0-1/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
20:33:11.765 INFO  BlockManagerInfo - Removed broadcast_22_piece0 on localhost:45281 in memory (size: 159.0 B, free: 1920.0 MiB)
20:33:11.768 INFO  BlockManagerInfo - Removed broadcast_28_piece0 on localhost:45281 in memory (size: 320.0 B, free: 1920.0 MiB)
20:33:11.771 INFO  BlockManagerInfo - Removed broadcast_24_piece0 on localhost:45281 in memory (size: 4.3 KiB, free: 1920.0 MiB)
20:33:11.773 INFO  BlockManagerInfo - Removed broadcast_23_piece0 on localhost:45281 in memory (size: 465.0 B, free: 1920.0 MiB)
20:33:11.776 INFO  BlockManagerInfo - Removed broadcast_26_piece0 on localhost:45281 in memory (size: 3.2 KiB, free: 1920.0 MiB)
20:33:11.777 INFO  BlockManagerInfo - Removed broadcast_33_piece0 on localhost:45281 in memory (size: 4.8 KiB, free: 1920.0 MiB)
20:33:11.779 INFO  BlockManagerInfo - Removed broadcast_30_piece0 on localhost:45281 in memory (size: 4.7 KiB, free: 1920.0 MiB)
20:33:11.781 INFO  BlockManagerInfo - Removed broadcast_25_piece0 on localhost:45281 in memory (size: 4.5 KiB, free: 1920.0 MiB)
20:33:11.783 INFO  FSImageFormatPBINode - Loading 1 INodes.
20:33:11.784 INFO  BlockManagerInfo - Removed broadcast_27_piece0 on localhost:45281 in memory (size: 5.1 KiB, free: 1920.0 MiB)
20:33:11.784 INFO  FSImageFormatPBINode - Successfully loaded 1 inodes
20:33:11.785 INFO  BlockManagerInfo - Removed broadcast_29_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1920.0 MiB)
20:33:11.786 INFO  BlockManager - Removing RDD 47
20:33:11.787 INFO  FSImageFormatPBINode - Completed update blocks map and name cache, total waiting duration 0ms.
20:33:11.788 INFO  FSImageFormatProtobuf - Loaded FSImage in 0 seconds.
20:33:11.788 INFO  FSImage - Loaded image for txid 0 from /tmp/minicluster_storage11240959748026123074/name-0-1/current/fsimage_0000000000000000000
20:33:11.791 INFO  FSNamesystem - Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
20:33:11.791 INFO  FSEditLog - Starting log segment at 1
20:33:11.799 INFO  NameCache - initialized with 0 entries 0 lookups
20:33:11.799 INFO  FSNamesystem - Finished loading FSImage in 40 msecs
20:33:11.862 INFO  NameNode - RPC server is binding to localhost:0
20:33:11.862 INFO  NameNode - Enable NameNode state context:false
20:33:11.865 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false.
20:33:11.872 INFO  Server - Listener at localhost:44977
20:33:11.872 INFO  Server - Starting Socket Reader #1 for port 0
20:33:11.893 INFO  NameNode - Clients are to use localhost:44977 to access this namenode/service.
20:33:11.894 INFO  FSNamesystem - Registered FSNamesystemState, ReplicatedBlocksState and ECBlockGroupsState MBeans.
20:33:11.907 INFO  LeaseManager - Number of blocks under construction: 0
20:33:11.912 INFO  DatanodeAdminDefaultMonitor - Initialized the Default Decommission and Maintenance monitor
20:33:11.913 INFO  BlockManager - Start MarkedDeleteBlockScrubber thread
20:33:11.914 INFO  BlockManager - initializing replication queues
20:33:11.914 INFO  StateChange - STATE* Leaving safe mode after 0 secs
20:33:11.914 INFO  StateChange - STATE* Network topology has 0 racks and 0 datanodes
20:33:11.914 INFO  StateChange - STATE* UnderReplicatedBlocks has 0 blocks
20:33:11.917 INFO  BlockManager - Total number of blocks            = 0
20:33:11.917 INFO  BlockManager - Number of invalid blocks          = 0
20:33:11.917 INFO  BlockManager - Number of under-replicated blocks = 0
20:33:11.917 INFO  BlockManager - Number of  over-replicated blocks = 0
20:33:11.917 INFO  BlockManager - Number of blocks being written    = 0
20:33:11.917 INFO  StateChange - STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 3 msec
20:33:11.931 INFO  Server - IPC Server Responder: starting
20:33:11.932 INFO  Server - IPC Server listener on 0: starting
20:33:11.934 INFO  NameNode - NameNode RPC up at: localhost/127.0.0.1:44977
20:33:11.934 WARN  MetricsLoggerTask - Metrics logging will not be async since the logger is not log4j
20:33:11.935 INFO  FSNamesystem - Starting services required for active state
20:33:11.935 INFO  FSDirectory - Initializing quota with 12 thread(s)
20:33:11.936 INFO  FSDirectory - Quota initialization completed in 1 milliseconds
name space=1
storage space=0
storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0, PROVIDED=0
20:33:11.939 INFO  CacheReplicationMonitor - Starting CacheReplicationMonitor with interval 30000 milliseconds
20:33:11.945 INFO  MiniDFSCluster - Starting DataNode 0 with dfs.datanode.data.dir: [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data1,[DISK]file:/tmp/minicluster_storage11240959748026123074/data/data2
20:33:11.955 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data1
20:33:11.962 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data2
20:33:11.973 INFO  MetricsSystemImpl - DataNode metrics system started (again)
20:33:11.977 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
20:33:11.979 INFO  BlockScanner - Initialized block scanner with targetBytesPerSec 1048576
20:33:11.981 INFO  DataNode - Configured hostname is 127.0.0.1
20:33:11.983 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
20:33:11.983 INFO  DataNode - Starting DataNode with maxLockedMemory = 0
20:33:11.986 INFO  DataNode - Opened streaming server at /127.0.0.1:35765
20:33:11.987 INFO  DataNode - Balancing bandwidth is 104857600 bytes/s
20:33:11.987 INFO  DataNode - Number threads for balancing is 100
20:33:11.991 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
20:33:11.991 WARN  HttpRequestLog - Jetty request log can only be enabled using Log4j
20:33:11.992 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
20:33:11.993 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
20:33:11.993 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
20:33:11.994 INFO  HttpServer2 - Jetty bound to port 39269
20:33:11.994 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
20:33:11.995 INFO  session - DefaultSessionIdManager workerName=node0
20:33:11.995 INFO  session - No SessionScavenger set, using defaults
20:33:11.995 INFO  session - node0 Scavenging every 600000ms
20:33:11.996 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@2366604{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.3.6/5058b645375c6a68f509e167ad6a6ada9642df09/hadoop-hdfs-3.3.6-tests.jar!/webapps/static,AVAILABLE}
20:33:12.087 INFO  ContextHandler - Started o.e.j.w.WebAppContext@3c00c7ab{datanode,/,file:///tmp/jetty-localhost-39269-hadoop-hdfs-3_3_6-tests_jar-_-any-14924105205596456916/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.3.6/5058b645375c6a68f509e167ad6a6ada9642df09/hadoop-hdfs-3.3.6-tests.jar!/webapps/datanode}
20:33:12.088 INFO  AbstractConnector - Started ServerConnector@4e025001{HTTP/1.1, (http/1.1)}{localhost:39269}
20:33:12.088 INFO  Server - Started @27837ms
20:33:12.092 WARN  DatanodeHttpServer - Got null for restCsrfPreventionFilter - will not do any filtering.
20:33:12.093 INFO  DatanodeHttpServer - Listening HTTP traffic on /127.0.0.1:41585
20:33:12.093 INFO  JvmPauseMonitor - Starting JVM pause monitor
20:33:12.094 INFO  DataNode - dnUserName = runner
20:33:12.094 INFO  DataNode - supergroup = supergroup
20:33:12.101 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false.
20:33:12.101 INFO  Server - Listener at localhost:41047
20:33:12.102 INFO  Server - Starting Socket Reader #1 for port 0
20:33:12.105 INFO  DataNode - Opened IPC server at /127.0.0.1:41047
20:33:12.120 INFO  DataNode - Refresh request received for nameservices: null
20:33:12.121 INFO  DataNode - Starting BPOfferServices for nameservices: <default>
20:33:12.128 INFO  DataNode - Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:44977 starting to offer service
20:33:12.129 WARN  MetricsLoggerTask - Metrics logging will not be async since the logger is not log4j
20:33:12.130 INFO  Server - IPC Server Responder: starting
20:33:12.130 INFO  Server - IPC Server listener on 0: starting
20:33:12.233 INFO  DataNode - Acknowledging ACTIVE Namenode during handshakeBlock pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:44977
20:33:12.234 INFO  Storage - Using 2 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=2, dataDirs=2)
20:33:12.235 INFO  Storage - Lock on /tmp/minicluster_storage11240959748026123074/data/data1/in_use.lock acquired by nodename 3077@pkrvmq0rgcvqdmg
20:33:12.235 INFO  Storage - Storage directory with location [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data1 is not formatted for namespace 1741877403. Formatting...
20:33:12.236 INFO  Storage - Generated new storageID DS-df85c766-e893-4737-8b03-357cd789a447 for directory /tmp/minicluster_storage11240959748026123074/data/data1 
20:33:12.238 INFO  Storage - Lock on /tmp/minicluster_storage11240959748026123074/data/data2/in_use.lock acquired by nodename 3077@pkrvmq0rgcvqdmg
20:33:12.238 INFO  Storage - Storage directory with location [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data2 is not formatted for namespace 1741877403. Formatting...
20:33:12.238 INFO  Storage - Generated new storageID DS-377617af-b02c-4ff6-8304-80e03ecef754 for directory /tmp/minicluster_storage11240959748026123074/data/data2 
20:33:12.245 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=datanodeReport	src=null	dst=null	perm=null	proto=rpc
20:33:12.249 INFO  MiniDFSCluster - dnInfo.length != numDataNodes
20:33:12.249 INFO  MiniDFSCluster - Waiting for cluster to become active
20:33:12.256 INFO  Storage - Analyzing storage directories for bpid BP-169878775-10.1.0.111-1752611591188
20:33:12.256 INFO  Storage - Locking is disabled for /tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188
20:33:12.257 INFO  Storage - Block pool storage directory for location [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data1 and block pool id BP-169878775-10.1.0.111-1752611591188 is not formatted. Formatting ...
20:33:12.257 INFO  Storage - Formatting block pool BP-169878775-10.1.0.111-1752611591188 directory /tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current
20:33:12.270 INFO  Storage - Analyzing storage directories for bpid BP-169878775-10.1.0.111-1752611591188
20:33:12.270 INFO  Storage - Locking is disabled for /tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188
20:33:12.270 INFO  Storage - Block pool storage directory for location [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data2 and block pool id BP-169878775-10.1.0.111-1752611591188 is not formatted. Formatting ...
20:33:12.270 INFO  Storage - Formatting block pool BP-169878775-10.1.0.111-1752611591188 directory /tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current
20:33:12.271 INFO  DataNode - Setting up storage: nsid=1741877403;bpid=BP-169878775-10.1.0.111-1752611591188;lv=-57;nsInfo=lv=-66;cid=testClusterID;nsid=1741877403;c=1752611591188;bpid=BP-169878775-10.1.0.111-1752611591188;dnuuid=null
20:33:12.273 INFO  DataNode - Generated and persisted new Datanode UUID 5d06df6f-6d54-439e-a2bc-24e6ef6e242e
20:33:12.281 INFO  FsDatasetImpl - The datanode lock is a read write lock
20:33:12.304 INFO  FsDatasetImpl - Added new volume: DS-df85c766-e893-4737-8b03-357cd789a447
20:33:12.304 INFO  FsDatasetImpl - Added volume - [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data1, StorageType: DISK
20:33:12.304 INFO  FsDatasetImpl - Added new volume: DS-377617af-b02c-4ff6-8304-80e03ecef754
20:33:12.304 INFO  FsDatasetImpl - Added volume - [DISK]file:/tmp/minicluster_storage11240959748026123074/data/data2, StorageType: DISK
20:33:12.307 INFO  MemoryMappableBlockLoader - Initializing cache loader: MemoryMappableBlockLoader.
20:33:12.309 INFO  FsDatasetImpl - Registered FSDatasetState MBean
20:33:12.311 INFO  FsDatasetImpl - Adding block pool BP-169878775-10.1.0.111-1752611591188
20:33:12.312 INFO  FsDatasetImpl - Scanning block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data1...
20:33:12.312 INFO  FsDatasetImpl - Scanning block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data2...
20:33:12.315 WARN  FsDatasetImpl - dfsUsed file missing in /tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current, will proceed with Du for space computation calculation, 
20:33:12.315 WARN  FsDatasetImpl - dfsUsed file missing in /tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current, will proceed with Du for space computation calculation, 
20:33:12.344 INFO  FsDatasetImpl - Time taken to scan block pool BP-169878775-10.1.0.111-1752611591188 on /tmp/minicluster_storage11240959748026123074/data/data2: 32ms
20:33:12.344 INFO  FsDatasetImpl - Time taken to scan block pool BP-169878775-10.1.0.111-1752611591188 on /tmp/minicluster_storage11240959748026123074/data/data1: 32ms
20:33:12.344 INFO  FsDatasetImpl - Total time to scan all replicas for block pool BP-169878775-10.1.0.111-1752611591188: 33ms
20:33:12.345 INFO  FsDatasetImpl - Adding replicas to map for block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data1...
20:33:12.345 INFO  FsDatasetImpl - Adding replicas to map for block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data2...
20:33:12.345 INFO  BlockPoolSlice - Replica Cache file: /tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/replicas doesn't exist 
20:33:12.345 INFO  BlockPoolSlice - Replica Cache file: /tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/replicas doesn't exist 
20:33:12.346 INFO  FsDatasetImpl - Time to add replicas to map for block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data1: 1ms
20:33:12.346 INFO  FsDatasetImpl - Time to add replicas to map for block pool BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data2: 1ms
20:33:12.346 INFO  FsDatasetImpl - Total time to add all replicas to map for block pool BP-169878775-10.1.0.111-1752611591188: 2ms
20:33:12.346 INFO  ThrottledAsyncChecker - Scheduling a check for /tmp/minicluster_storage11240959748026123074/data/data1
20:33:12.350 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=datanodeReport	src=null	dst=null	perm=null	proto=rpc
20:33:12.351 INFO  MiniDFSCluster - dnInfo.length != numDataNodes
20:33:12.351 INFO  MiniDFSCluster - Waiting for cluster to become active
20:33:12.351 INFO  DatasetVolumeChecker - Scheduled health check for volume /tmp/minicluster_storage11240959748026123074/data/data1
20:33:12.351 INFO  ThrottledAsyncChecker - Scheduling a check for /tmp/minicluster_storage11240959748026123074/data/data2
20:33:12.352 INFO  DatasetVolumeChecker - Scheduled health check for volume /tmp/minicluster_storage11240959748026123074/data/data2
20:33:12.353 INFO  VolumeScanner - Now scanning bpid BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data2
20:33:12.353 INFO  VolumeScanner - Now scanning bpid BP-169878775-10.1.0.111-1752611591188 on volume /tmp/minicluster_storage11240959748026123074/data/data1
20:33:12.353 INFO  VolumeScanner - VolumeScanner(/tmp/minicluster_storage11240959748026123074/data/data1, DS-df85c766-e893-4737-8b03-357cd789a447): finished scanning block pool BP-169878775-10.1.0.111-1752611591188
20:33:12.353 INFO  VolumeScanner - VolumeScanner(/tmp/minicluster_storage11240959748026123074/data/data2, DS-377617af-b02c-4ff6-8304-80e03ecef754): finished scanning block pool BP-169878775-10.1.0.111-1752611591188
20:33:12.354 WARN  DirectoryScanner - dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
20:33:12.355 INFO  DirectoryScanner - Periodic Directory Tree Verification scan starting in 7151817ms with interval of 21600000ms and throttle limit of -1ms/s
20:33:12.372 INFO  VolumeScanner - VolumeScanner(/tmp/minicluster_storage11240959748026123074/data/data2, DS-377617af-b02c-4ff6-8304-80e03ecef754): no suitable block pools found to scan.  Waiting 1814399981 ms.
20:33:12.372 INFO  VolumeScanner - VolumeScanner(/tmp/minicluster_storage11240959748026123074/data/data1, DS-df85c766-e893-4737-8b03-357cd789a447): no suitable block pools found to scan.  Waiting 1814399981 ms.
20:33:12.372 INFO  DataNode - Block pool BP-169878775-10.1.0.111-1752611591188 (Datanode Uuid 5d06df6f-6d54-439e-a2bc-24e6ef6e242e) service to localhost/127.0.0.1:44977 beginning handshake with NN
20:33:12.383 INFO  StateChange - BLOCK* registerDatanode: from DatanodeRegistration(127.0.0.1:35765, datanodeUuid=5d06df6f-6d54-439e-a2bc-24e6ef6e242e, infoPort=41585, infoSecurePort=0, ipcPort=41047, storageInfo=lv=-57;cid=testClusterID;nsid=1741877403;c=1752611591188) storage 5d06df6f-6d54-439e-a2bc-24e6ef6e242e
20:33:12.384 INFO  NetworkTopology - Adding a new node: /default-rack/127.0.0.1:35765
20:33:12.384 INFO  BlockReportLeaseManager - Registered DN 5d06df6f-6d54-439e-a2bc-24e6ef6e242e (127.0.0.1:35765).
20:33:12.387 INFO  DataNode - Block pool BP-169878775-10.1.0.111-1752611591188 (Datanode Uuid 5d06df6f-6d54-439e-a2bc-24e6ef6e242e) service to localhost/127.0.0.1:44977 successfully registered with NN
20:33:12.388 INFO  DataNode - Starting IBR Task Handler.
20:33:12.389 INFO  DataNode - For namenode localhost/127.0.0.1:44977 using BLOCKREPORT_INTERVAL of 21600000msecs CACHEREPORT_INTERVAL of 10000msecs Initial delay: 0msecs; heartBeatInterval=3000
20:33:12.398 INFO  DatanodeDescriptor - Adding new storage ID DS-df85c766-e893-4737-8b03-357cd789a447 for DN 127.0.0.1:35765
20:33:12.398 INFO  DatanodeDescriptor - Adding new storage ID DS-377617af-b02c-4ff6-8304-80e03ecef754 for DN 127.0.0.1:35765
20:33:12.405 INFO  DataNode - After receiving heartbeat response, updating state of namenode localhost:44977 to active
20:33:12.414 INFO  BlockStateChange - BLOCK* processReport 0x254e7d992ac3e8e5 with lease ID 0xf62c60299839da1f: Processing first storage report for DS-df85c766-e893-4737-8b03-357cd789a447 from datanode DatanodeRegistration(127.0.0.1:35765, datanodeUuid=5d06df6f-6d54-439e-a2bc-24e6ef6e242e, infoPort=41585, infoSecurePort=0, ipcPort=41047, storageInfo=lv=-57;cid=testClusterID;nsid=1741877403;c=1752611591188)
20:33:12.415 INFO  BlockStateChange - BLOCK* processReport 0x254e7d992ac3e8e5 with lease ID 0xf62c60299839da1f: from storage DS-df85c766-e893-4737-8b03-357cd789a447 node DatanodeRegistration(127.0.0.1:35765, datanodeUuid=5d06df6f-6d54-439e-a2bc-24e6ef6e242e, infoPort=41585, infoSecurePort=0, ipcPort=41047, storageInfo=lv=-57;cid=testClusterID;nsid=1741877403;c=1752611591188), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
20:33:12.415 INFO  BlockStateChange - BLOCK* processReport 0x254e7d992ac3e8e5 with lease ID 0xf62c60299839da1f: Processing first storage report for DS-377617af-b02c-4ff6-8304-80e03ecef754 from datanode DatanodeRegistration(127.0.0.1:35765, datanodeUuid=5d06df6f-6d54-439e-a2bc-24e6ef6e242e, infoPort=41585, infoSecurePort=0, ipcPort=41047, storageInfo=lv=-57;cid=testClusterID;nsid=1741877403;c=1752611591188)
20:33:12.415 INFO  BlockStateChange - BLOCK* processReport 0x254e7d992ac3e8e5 with lease ID 0xf62c60299839da1f: from storage DS-377617af-b02c-4ff6-8304-80e03ecef754 node DatanodeRegistration(127.0.0.1:35765, datanodeUuid=5d06df6f-6d54-439e-a2bc-24e6ef6e242e, infoPort=41585, infoSecurePort=0, ipcPort=41047, storageInfo=lv=-57;cid=testClusterID;nsid=1741877403;c=1752611591188), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
20:33:12.425 INFO  DataNode - Successfully sent block report 0x254e7d992ac3e8e5 with lease ID 0xf62c60299839da1f to namenode: localhost/127.0.0.1:44977,  containing 2 storage report(s), of which we sent 2. The reports had 0 total blocks and used 1 RPC(s). This took 2 msecs to generate and 18 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
20:33:12.426 INFO  DataNode - Got finalize command for block pool BP-169878775-10.1.0.111-1752611591188
20:33:12.452 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=datanodeReport	src=null	dst=null	perm=null	proto=rpc
20:33:12.456 INFO  MiniDFSCluster - Cluster is active
20:33:12.523 INFO  MemoryStore - Block broadcast_34 stored as values in memory (estimated size 297.9 KiB, free 1919.7 MiB)
20:33:12.539 INFO  MemoryStore - Block broadcast_34_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.7 MiB)
20:33:12.540 INFO  BlockManagerInfo - Added broadcast_34_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1920.0 MiB)
20:33:12.540 INFO  SparkContext - Created broadcast 34 from newAPIHadoopFile at PathSplitSource.java:96
20:33:12.585 INFO  MemoryStore - Block broadcast_35 stored as values in memory (estimated size 297.9 KiB, free 1919.4 MiB)
20:33:12.594 INFO  MemoryStore - Block broadcast_35_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:12.594 INFO  BlockManagerInfo - Added broadcast_35_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:12.594 INFO  SparkContext - Created broadcast 35 from newAPIHadoopFile at PathSplitSource.java:96
20:33:12.643 INFO  FileInputFormat - Total input files to process : 1
20:33:12.655 INFO  MemoryStore - Block broadcast_36 stored as values in memory (estimated size 160.7 KiB, free 1919.2 MiB)
20:33:12.660 INFO  MemoryStore - Block broadcast_36_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.2 MiB)
20:33:12.660 INFO  BlockManagerInfo - Added broadcast_36_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:12.660 INFO  SparkContext - Created broadcast 36 from broadcast at ReadsSparkSink.java:133
20:33:12.669 INFO  MemoryStore - Block broadcast_37 stored as values in memory (estimated size 163.2 KiB, free 1919.0 MiB)
20:33:12.673 INFO  MemoryStore - Block broadcast_37_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.0 MiB)
20:33:12.674 INFO  BlockManagerInfo - Added broadcast_37_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:12.674 INFO  SparkContext - Created broadcast 37 from broadcast at BamSink.java:76
20:33:12.694 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:12.699 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:12.700 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:12.700 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:12.707 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:12.731 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:12.732 INFO  DAGScheduler - Registering RDD 77 (mapToPair at SparkUtils.java:161) as input to shuffle 7
20:33:12.733 INFO  DAGScheduler - Got job 20 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:12.733 INFO  DAGScheduler - Final stage: ResultStage 30 (runJob at SparkHadoopWriter.scala:83)
20:33:12.733 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 29)
20:33:12.733 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 29)
20:33:12.733 INFO  DAGScheduler - Submitting ShuffleMapStage 29 (MapPartitionsRDD[77] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:12.764 INFO  MemoryStore - Block broadcast_38 stored as values in memory (estimated size 520.4 KiB, free 1918.5 MiB)
20:33:12.765 INFO  MemoryStore - Block broadcast_38_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1918.3 MiB)
20:33:12.766 INFO  BlockManagerInfo - Added broadcast_38_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.7 MiB)
20:33:12.766 INFO  SparkContext - Created broadcast 38 from broadcast at DAGScheduler.scala:1580
20:33:12.766 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 29 (MapPartitionsRDD[77] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:12.766 INFO  TaskSchedulerImpl - Adding task set 29.0 with 1 tasks resource profile 0
20:33:12.769 INFO  TaskSetManager - Starting task 0.0 in stage 29.0 (TID 67) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:12.769 INFO  Executor - Running task 0.0 in stage 29.0 (TID 67)
20:33:12.828 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:12.878 INFO  Executor - Finished task 0.0 in stage 29.0 (TID 67). 1148 bytes result sent to driver
20:33:12.879 INFO  TaskSetManager - Finished task 0.0 in stage 29.0 (TID 67) in 112 ms on localhost (executor driver) (1/1)
20:33:12.879 INFO  TaskSchedulerImpl - Removed TaskSet 29.0, whose tasks have all completed, from pool 
20:33:12.880 INFO  DAGScheduler - ShuffleMapStage 29 (mapToPair at SparkUtils.java:161) finished in 0.145 s
20:33:12.880 INFO  DAGScheduler - looking for newly runnable stages
20:33:12.880 INFO  DAGScheduler - running: HashSet()
20:33:12.880 INFO  DAGScheduler - waiting: HashSet(ResultStage 30)
20:33:12.880 INFO  DAGScheduler - failed: HashSet()
20:33:12.880 INFO  DAGScheduler - Submitting ResultStage 30 (MapPartitionsRDD[82] at mapToPair at BamSink.java:91), which has no missing parents
20:33:12.890 INFO  MemoryStore - Block broadcast_39 stored as values in memory (estimated size 241.5 KiB, free 1918.1 MiB)
20:33:12.891 INFO  MemoryStore - Block broadcast_39_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1918.0 MiB)
20:33:12.892 INFO  BlockManagerInfo - Added broadcast_39_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.7 MiB)
20:33:12.892 INFO  SparkContext - Created broadcast 39 from broadcast at DAGScheduler.scala:1580
20:33:12.892 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 30 (MapPartitionsRDD[82] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:12.892 INFO  TaskSchedulerImpl - Adding task set 30.0 with 1 tasks resource profile 0
20:33:12.893 INFO  TaskSetManager - Starting task 0.0 in stage 30.0 (TID 68) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:12.893 INFO  Executor - Running task 0.0 in stage 30.0 (TID 68)
20:33:12.909 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:12.910 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:12.981 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:12.981 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:12.981 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:12.981 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:12.982 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:12.982 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:13.002 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:13.019 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:13.021 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:13.041 INFO  StateChange - BLOCK* allocate blk_1073741825_1001, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/part-r-00000
20:33:13.071 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741825_1001 src: /127.0.0.1:59630 dest: /127.0.0.1:35765
20:33:13.096 INFO  clienttrace - src: /127.0.0.1:59630, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741825_1001, duration(ns): 3882406
20:33:13.096 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
20:33:13.099 INFO  FSNamesystem - BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/part-r-00000
20:33:13.502 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:13.503 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:13.506 INFO  StateChange - BLOCK* allocate blk_1073741826_1002, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.sbi
20:33:13.508 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741826_1002 src: /127.0.0.1:59638 dest: /127.0.0.1:35765
20:33:13.510 INFO  clienttrace - src: /127.0.0.1:59638, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741826_1002, duration(ns): 689362
20:33:13.510 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
20:33:13.511 INFO  FSNamesystem - BLOCK* blk_1073741826_1002 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.sbi
20:33:13.912 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:13.916 INFO  StateChange - BLOCK* allocate blk_1073741827_1003, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.bai
20:33:13.918 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741827_1003 src: /127.0.0.1:59646 dest: /127.0.0.1:35765
20:33:13.919 INFO  clienttrace - src: /127.0.0.1:59646, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741827_1003, duration(ns): 756522
20:33:13.919 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741827_1003, type=LAST_IN_PIPELINE terminating
20:33:13.920 INFO  FSNamesystem - BLOCK* blk_1073741827_1003 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.bai
20:33:14.322 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.325 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0	dst=null	perm=null	proto=rpc
20:33:14.330 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0	dst=null	perm=null	proto=rpc
20:33:14.331 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000	dst=null	perm=null	proto=rpc
20:33:14.337 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:14.338 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033128841024533088119922_0082_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000
20:33:14.339 INFO  SparkHadoopMapRedUtil - attempt_202507152033128841024533088119922_0082_r_000000_0: Committed. Elapsed time: 9 ms.
20:33:14.340 INFO  Executor - Finished task 0.0 in stage 30.0 (TID 68). 1858 bytes result sent to driver
20:33:14.341 INFO  TaskSetManager - Finished task 0.0 in stage 30.0 (TID 68) in 1448 ms on localhost (executor driver) (1/1)
20:33:14.342 INFO  TaskSchedulerImpl - Removed TaskSet 30.0, whose tasks have all completed, from pool 
20:33:14.342 INFO  DAGScheduler - ResultStage 30 (runJob at SparkHadoopWriter.scala:83) finished in 1.461 s
20:33:14.342 INFO  DAGScheduler - Job 20 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:14.342 INFO  TaskSchedulerImpl - Killing all running tasks in stage 30: Stage finished
20:33:14.342 INFO  DAGScheduler - Job 20 finished: runJob at SparkHadoopWriter.scala:83, took 1.610886 s
20:33:14.344 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033128841024533088119922_0082.
20:33:14.346 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:14.348 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:14.349 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000	dst=null	perm=null	proto=rpc
20:33:14.350 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:14.351 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.352 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:14.353 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.353 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:14.354 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary/0/task_202507152033128841024533088119922_0082_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.361 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:14.363 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.364 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.367 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.spark-staging-82	dst=null	perm=null	proto=rpc
20:33:14.368 INFO  SparkHadoopWriter - Write Job job_202507152033128841024533088119922_0082 committed. Elapsed time: 23 ms.
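Job 20's write is committed in two rename steps, both visible in the audit trail: task commit renames the attempt directory to .../_temporary/0/task_... (20:33:14.337), and job commit then promotes part-r-00000 and the hidden .sbi/.bai fragments from that task directory into the .bam.parts directory, deletes _temporary, and writes an empty _SUCCESS marker. At the FileSystem level the sequence amounts to roughly the sketch below (paths copied from the log, error handling omitted); the real work is done by FileOutputCommitter and SparkHadoopWriter, not by hand-written code like this.

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:44977"), new Configuration());
    Path parts   = new Path("/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts");
    Path attempt = new Path(parts, "_temporary/0/_temporary/attempt_202507152033128841024533088119922_0082_r_000000_0");
    Path task    = new Path(parts, "_temporary/0/task_202507152033128841024533088119922_0082_r_000000");

    fs.rename(attempt, task);                                    // task commit
    for (String f : new String[] {".part-r-00000.bai", ".part-r-00000.sbi", "part-r-00000"}) {
        fs.rename(new Path(task, f), new Path(parts, f));        // job commit: promote outputs
    }
    fs.delete(new Path(parts, "_temporary"), true);              // drop the scratch tree
    fs.create(new Path(parts, "_SUCCESS")).close();              // success marker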
20:33:14.369 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.371 INFO  StateChange - BLOCK* allocate blk_1073741828_1004, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/header
20:33:14.372 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741828_1004 src: /127.0.0.1:59660 dest: /127.0.0.1:35765
20:33:14.374 INFO  clienttrace - src: /127.0.0.1:59660, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741828_1004, duration(ns): 772091
20:33:14.374 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741828_1004, type=LAST_IN_PIPELINE terminating
20:33:14.376 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.377 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.378 INFO  StateChange - BLOCK* allocate blk_1073741829_1005, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/terminator
20:33:14.380 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741829_1005 src: /127.0.0.1:59676 dest: /127.0.0.1:35765
20:33:14.381 INFO  clienttrace - src: /127.0.0.1:59676, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741829_1005, duration(ns): 648305
20:33:14.382 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741829_1005, type=LAST_IN_PIPELINE terminating
20:33:14.383 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.384 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:14.388 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.389 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.389 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam
20:33:14.393 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.394 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.395 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.395 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam done
20:33:14.396 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
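With the parts committed, the sink writes a standalone BGZF header file and a 28-byte terminator (the BGZF EOF block), concatenates header + part-r-00000 + terminator into a temporary "output" file with HDFS concat, and renames the result to the final .bam. On HDFS the concat is a metadata operation (blocks are moved, not copied), which is why the whole step takes only a few milliseconds here. A minimal sketch of the same calls, reusing the fs and parts handles from the previous sketch and mirroring the order in the log:

    Path assembled = new Path(parts, "output");

    fs.create(assembled).close();                                // empty target, as in the log
    fs.concat(assembled, new Path[] {                            // HDFS-only, metadata-level concat
            new Path(parts, "header"),
            new Path(parts, "part-r-00000"),
            new Path(parts, "terminator")});
    fs.rename(assembled, new Path("/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam"));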
20:33:14.397 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi
20:33:14.398 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:14.400 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.401 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:14.405 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:14.438 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:14.441 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:14.442 INFO  StateChange - BLOCK* allocate blk_1073741830_1006, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi
20:33:14.443 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741830_1006 src: /127.0.0.1:59706 dest: /127.0.0.1:35765
20:33:14.445 INFO  clienttrace - src: /127.0.0.1:59706, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741830_1006, duration(ns): 673868
20:33:14.445 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741830_1006, type=LAST_IN_PIPELINE terminating
20:33:14.446 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.447 INFO  IndexFileMerger - Done merging .sbi files
20:33:14.448 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai
20:33:14.448 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:14.450 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:14.451 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:14.452 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:14.454 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:14.455 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:14.463 INFO  StateChange - BLOCK* allocate blk_1073741831_1007, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai
20:33:14.464 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741831_1007 src: /127.0.0.1:59708 dest: /127.0.0.1:35765
20:33:14.466 INFO  clienttrace - src: /127.0.0.1:59708, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741831_1007, duration(ns): 1161357
20:33:14.466 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741831_1007, type=LAST_IN_PIPELINE terminating
20:33:14.467 INFO  FSNamesystem - BLOCK* blk_1073741831_1007 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai
20:33:14.868 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:14.869 INFO  IndexFileMerger - Done merging .bai files
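The per-part index fragments (.part-r-00000.sbi and .part-r-00000.bai) are then merged into whole-file .sbi and .bai indexes next to the .bam, and the fragments are deleted before the entire .parts directory is removed (next line). The merge itself is index-aware, so plain byte concatenation is not what IndexFileMerger actually does; the sketch below only illustrates the surrounding HDFS plumbing the audit log records: list the hidden fragments, read them, write one merged output, delete the inputs. It reuses fs and parts (plus the Path import) from the sketches above.

    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.io.IOUtils;

    try (FSDataOutputStream merged =
             fs.create(new Path("/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi"))) {
        for (FileStatus frag : fs.globStatus(new Path(parts, ".part-*.sbi"))) {
            try (java.io.InputStream in = fs.open(frag.getPath())) {
                IOUtils.copyBytes(in, merged, 4096, false);      // the real merge also rewrites offsets; this only copies
            }
            fs.delete(frag.getPath(), false);                    // drop the fragment, as at 20:33:14.441
        }
    }
    fs.delete(parts, true);                                      // final cleanup of the .parts directory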
20:33:14.870 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.parts	dst=null	perm=null	proto=rpc
20:33:14.880 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.889 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.890 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.891 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.894 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:14.894 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.895 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.896 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.897 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.898 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.899 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.899 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.901 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:14.905 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:14.906 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.906 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:14.907 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.908 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:14.909 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:14.910 INFO  MemoryStore - Block broadcast_40 stored as values in memory (estimated size 320.0 B, free 1918.0 MiB)
20:33:14.911 INFO  MemoryStore - Block broadcast_40_piece0 stored as bytes in memory (estimated size 233.0 B, free 1918.0 MiB)
20:33:14.911 INFO  BlockManagerInfo - Added broadcast_40_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.7 MiB)
20:33:14.911 INFO  SparkContext - Created broadcast 40 from broadcast at BamSource.java:104
20:33:14.914 INFO  MemoryStore - Block broadcast_41 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:14.921 INFO  MemoryStore - Block broadcast_41_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:14.922 INFO  BlockManagerInfo - Added broadcast_41_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:14.922 INFO  SparkContext - Created broadcast 41 from newAPIHadoopFile at PathSplitSource.java:96
20:33:14.937 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.938 INFO  FileInputFormat - Total input files to process : 1
20:33:14.939 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.960 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:14.960 INFO  DAGScheduler - Got job 21 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:14.960 INFO  DAGScheduler - Final stage: ResultStage 31 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:14.960 INFO  DAGScheduler - Parents of final stage: List()
20:33:14.960 INFO  DAGScheduler - Missing parents: List()
20:33:14.961 INFO  DAGScheduler - Submitting ResultStage 31 (MapPartitionsRDD[88] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:14.969 INFO  MemoryStore - Block broadcast_42 stored as values in memory (estimated size 148.2 KiB, free 1917.5 MiB)
20:33:14.970 INFO  MemoryStore - Block broadcast_42_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.5 MiB)
20:33:14.970 INFO  BlockManagerInfo - Added broadcast_42_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:14.971 INFO  SparkContext - Created broadcast 42 from broadcast at DAGScheduler.scala:1580
20:33:14.971 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 31 (MapPartitionsRDD[88] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:14.971 INFO  TaskSchedulerImpl - Adding task set 31.0 with 1 tasks resource profile 0
20:33:14.972 INFO  TaskSetManager - Starting task 0.0 in stage 31.0 (TID 69) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:14.972 INFO  Executor - Running task 0.0 in stage 31.0 (TID 69)
20:33:14.987 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam:0+237038
20:33:14.988 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.989 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:14.991 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.991 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.992 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:14.998 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:14.999 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:15.002 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:15.007 INFO  Executor - Finished task 0.0 in stage 31.0 (TID 69). 651526 bytes result sent to driver
20:33:15.010 INFO  TaskSetManager - Finished task 0.0 in stage 31.0 (TID 69) in 38 ms on localhost (executor driver) (1/1)
20:33:15.011 INFO  TaskSchedulerImpl - Removed TaskSet 31.0, whose tasks have all completed, from pool 
20:33:15.011 INFO  DAGScheduler - ResultStage 31 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.050 s
20:33:15.011 INFO  DAGScheduler - Job 21 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:15.011 INFO  TaskSchedulerImpl - Killing all running tasks in stage 31: Stage finished
20:33:15.011 INFO  DAGScheduler - Job 21 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.051434 s
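Jobs 21–23 are the verification half of readsSinkHDFSTest: the freshly written hdfs://...bam is read back through Spark (consulting its .bai/.sbi, as the open calls above show), its reads are collected, and its count is compared against the original local input. The shape of that check is sketched below; readBam is a hypothetical stand-in for the actual ReadsSparkSource call chain (the class at ReadsSparkSource.java:96 in the stack frames), and ctx stands for the test's JavaSparkContext.

    import org.apache.spark.api.java.JavaRDD;
    import org.broadinstitute.hellbender.utils.read.GATKRead;
    import org.testng.Assert;

    // readBam is hypothetical, not a real GATK method; it stands in for ReadsSparkSource.
    JavaRDD<GATKRead> roundTripped = readBam(ctx,
            "hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam");
    JavaRDD<GATKRead> original = readBam(ctx,
            "file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam");

    Assert.assertEquals(roundTripped.collect().size(), original.collect().size());  // job 21 collects the copy
    Assert.assertEquals(roundTripped.count(), original.count());                    // jobs 22 and 23 count both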
20:33:15.036 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:15.036 INFO  DAGScheduler - Got job 22 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:15.036 INFO  DAGScheduler - Final stage: ResultStage 32 (count at ReadsSparkSinkUnitTest.java:185)
20:33:15.036 INFO  DAGScheduler - Parents of final stage: List()
20:33:15.036 INFO  DAGScheduler - Missing parents: List()
20:33:15.037 INFO  DAGScheduler - Submitting ResultStage 32 (MapPartitionsRDD[70] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:15.059 INFO  MemoryStore - Block broadcast_43 stored as values in memory (estimated size 426.1 KiB, free 1917.1 MiB)
20:33:15.060 INFO  MemoryStore - Block broadcast_43_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.9 MiB)
20:33:15.060 INFO  BlockManagerInfo - Added broadcast_43_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.4 MiB)
20:33:15.061 INFO  SparkContext - Created broadcast 43 from broadcast at DAGScheduler.scala:1580
20:33:15.061 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 32 (MapPartitionsRDD[70] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:15.061 INFO  TaskSchedulerImpl - Adding task set 32.0 with 1 tasks resource profile 0
20:33:15.062 INFO  TaskSetManager - Starting task 0.0 in stage 32.0 (TID 70) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:15.063 INFO  Executor - Running task 0.0 in stage 32.0 (TID 70)
20:33:15.101 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:15.123 INFO  BlockManagerInfo - Removed broadcast_42_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.5 MiB)
20:33:15.127 INFO  BlockManagerInfo - Removed broadcast_36_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:15.129 INFO  BlockManagerInfo - Removed broadcast_35_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:15.131 INFO  BlockManagerInfo - Removed broadcast_38_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.7 MiB)
20:33:15.133 INFO  BlockManagerInfo - Removed broadcast_39_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.7 MiB)
20:33:15.135 INFO  BlockManagerInfo - Removed broadcast_37_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:15.147 INFO  Executor - Finished task 0.0 in stage 32.0 (TID 70). 1032 bytes result sent to driver
20:33:15.148 INFO  TaskSetManager - Finished task 0.0 in stage 32.0 (TID 70) in 86 ms on localhost (executor driver) (1/1)
20:33:15.148 INFO  TaskSchedulerImpl - Removed TaskSet 32.0, whose tasks have all completed, from pool 
20:33:15.148 INFO  DAGScheduler - ResultStage 32 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.111 s
20:33:15.149 INFO  DAGScheduler - Job 22 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:15.149 INFO  TaskSchedulerImpl - Killing all running tasks in stage 32: Stage finished
20:33:15.149 INFO  DAGScheduler - Job 22 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.113052 s
20:33:15.159 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:15.159 INFO  DAGScheduler - Got job 23 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:15.160 INFO  DAGScheduler - Final stage: ResultStage 33 (count at ReadsSparkSinkUnitTest.java:185)
20:33:15.160 INFO  DAGScheduler - Parents of final stage: List()
20:33:15.160 INFO  DAGScheduler - Missing parents: List()
20:33:15.161 INFO  DAGScheduler - Submitting ResultStage 33 (MapPartitionsRDD[88] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:15.174 INFO  MemoryStore - Block broadcast_44 stored as values in memory (estimated size 148.1 KiB, free 1918.6 MiB)
20:33:15.175 INFO  MemoryStore - Block broadcast_44_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.6 MiB)
20:33:15.176 INFO  BlockManagerInfo - Added broadcast_44_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.7 MiB)
20:33:15.177 INFO  SparkContext - Created broadcast 44 from broadcast at DAGScheduler.scala:1580
20:33:15.177 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 33 (MapPartitionsRDD[88] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:15.177 INFO  TaskSchedulerImpl - Adding task set 33.0 with 1 tasks resource profile 0
20:33:15.178 INFO  TaskSetManager - Starting task 0.0 in stage 33.0 (TID 71) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:15.179 INFO  Executor - Running task 0.0 in stage 33.0 (TID 71)
20:33:15.209 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam:0+237038
20:33:15.210 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:15.212 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam	dst=null	perm=null	proto=rpc
20:33:15.214 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:15.214 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:15.215 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_163ec16a-7724-4ecd-85e9-b445854d7b5e.bam.bai	dst=null	perm=null	proto=rpc
20:33:15.218 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:15.221 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:15.223 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:15.226 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:15.228 INFO  Executor - Finished task 0.0 in stage 33.0 (TID 71). 989 bytes result sent to driver
20:33:15.230 INFO  TaskSetManager - Finished task 0.0 in stage 33.0 (TID 71) in 52 ms on localhost (executor driver) (1/1)
20:33:15.230 INFO  TaskSchedulerImpl - Removed TaskSet 33.0, whose tasks have all completed, from pool 
20:33:15.230 INFO  DAGScheduler - ResultStage 33 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.069 s
20:33:15.230 INFO  DAGScheduler - Job 23 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:15.230 INFO  TaskSchedulerImpl - Killing all running tasks in stage 33: Stage finished
20:33:15.231 INFO  DAGScheduler - Job 23 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.071573 s
20:33:15.237 INFO  MemoryStore - Block broadcast_45 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:15.250 INFO  MemoryStore - Block broadcast_45_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:15.250 INFO  BlockManagerInfo - Added broadcast_45_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:15.251 INFO  SparkContext - Created broadcast 45 from newAPIHadoopFile at PathSplitSource.java:96
20:33:15.299 INFO  MemoryStore - Block broadcast_46 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:15.307 INFO  MemoryStore - Block broadcast_46_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:15.307 INFO  BlockManagerInfo - Added broadcast_46_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:15.307 INFO  SparkContext - Created broadcast 46 from newAPIHadoopFile at PathSplitSource.java:96
20:33:15.334 INFO  FileInputFormat - Total input files to process : 1
20:33:15.338 INFO  MemoryStore - Block broadcast_47 stored as values in memory (estimated size 160.7 KiB, free 1917.7 MiB)
20:33:15.341 INFO  MemoryStore - Block broadcast_47_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:15.341 INFO  BlockManagerInfo - Added broadcast_47_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:15.341 INFO  SparkContext - Created broadcast 47 from broadcast at ReadsSparkSink.java:133
20:33:15.343 INFO  MemoryStore - Block broadcast_48 stored as values in memory (estimated size 163.2 KiB, free 1917.5 MiB)
20:33:15.345 INFO  MemoryStore - Block broadcast_48_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:15.346 INFO  BlockManagerInfo - Added broadcast_48_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:15.346 INFO  SparkContext - Created broadcast 48 from broadcast at BamSink.java:76
20:33:15.349 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:15.350 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:15.350 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:15.350 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:15.351 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:15.365 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:15.365 INFO  DAGScheduler - Registering RDD 102 (mapToPair at SparkUtils.java:161) as input to shuffle 8
20:33:15.366 INFO  DAGScheduler - Got job 24 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:15.366 INFO  DAGScheduler - Final stage: ResultStage 35 (runJob at SparkHadoopWriter.scala:83)
20:33:15.366 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 34)
20:33:15.366 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 34)
20:33:15.366 INFO  DAGScheduler - Submitting ShuffleMapStage 34 (MapPartitionsRDD[102] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:15.390 INFO  MemoryStore - Block broadcast_49 stored as values in memory (estimated size 520.4 KiB, free 1917.0 MiB)
20:33:15.393 INFO  MemoryStore - Block broadcast_49_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.9 MiB)
20:33:15.394 INFO  BlockManagerInfo - Added broadcast_49_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:15.396 INFO  SparkContext - Created broadcast 49 from broadcast at DAGScheduler.scala:1580
20:33:15.396 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 34 (MapPartitionsRDD[102] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:15.397 INFO  TaskSchedulerImpl - Adding task set 34.0 with 1 tasks resource profile 0
20:33:15.399 INFO  TaskSetManager - Starting task 0.0 in stage 34.0 (TID 72) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:15.400 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741826_1002 replica FinalizedReplica, blk_1073741826_1002, FINALIZED
  getNumBytes()     = 212
  getBytesOnDisk()  = 212
  getVisibleLength()= 212
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741826 for deletion
20:33:15.401 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741827_1003 replica FinalizedReplica, blk_1073741827_1003, FINALIZED
  getNumBytes()     = 5472
  getBytesOnDisk()  = 5472
  getVisibleLength()= 5472
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741827 for deletion
20:33:15.401 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741826_1002 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741826
20:33:15.401 INFO  Executor - Running task 0.0 in stage 34.0 (TID 72)
20:33:15.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741827_1003 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741827
20:33:15.469 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:15.496 INFO  Executor - Finished task 0.0 in stage 34.0 (TID 72). 1148 bytes result sent to driver
20:33:15.497 INFO  TaskSetManager - Finished task 0.0 in stage 34.0 (TID 72) in 98 ms on localhost (executor driver) (1/1)
20:33:15.497 INFO  TaskSchedulerImpl - Removed TaskSet 34.0, whose tasks have all completed, from pool 
20:33:15.497 INFO  DAGScheduler - ShuffleMapStage 34 (mapToPair at SparkUtils.java:161) finished in 0.130 s
20:33:15.497 INFO  DAGScheduler - looking for newly runnable stages
20:33:15.498 INFO  DAGScheduler - running: HashSet()
20:33:15.498 INFO  DAGScheduler - waiting: HashSet(ResultStage 35)
20:33:15.498 INFO  DAGScheduler - failed: HashSet()
20:33:15.498 INFO  DAGScheduler - Submitting ResultStage 35 (MapPartitionsRDD[107] at mapToPair at BamSink.java:91), which has no missing parents
20:33:15.512 INFO  MemoryStore - Block broadcast_50 stored as values in memory (estimated size 241.5 KiB, free 1916.6 MiB)
20:33:15.513 INFO  MemoryStore - Block broadcast_50_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.6 MiB)
20:33:15.513 INFO  BlockManagerInfo - Added broadcast_50_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.4 MiB)
20:33:15.514 INFO  SparkContext - Created broadcast 50 from broadcast at DAGScheduler.scala:1580
20:33:15.514 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 35 (MapPartitionsRDD[107] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:15.514 INFO  TaskSchedulerImpl - Adding task set 35.0 with 1 tasks resource profile 0
20:33:15.515 INFO  TaskSetManager - Starting task 0.0 in stage 35.0 (TID 73) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:15.515 INFO  Executor - Running task 0.0 in stage 35.0 (TID 73)
20:33:15.524 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:15.525 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
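The second write in this test run (job 24, a new UUID-suffixed output) makes the structure of the write pipeline easier to see: stage 34 keys the reads (mapToPair at SparkUtils.java:161) and shuffles them, and stage 35 fetches the shuffled block locally and hands the reads to BamSink (BamSink.java:91), which produces the part/.sbi/.bai trio committed below. The exact keying done by SparkUtils is not visible in the log; the snippet below is only the generic Spark pattern for imposing a coordinate order on reads ahead of a sink, with reads (a JavaRDD of GATKRead) and header (the htsjdk SAMFileHeader) assumed to be in scope.

    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.broadinstitute.hellbender.utils.read.GATKRead;
    import scala.Tuple2;

    // Generic pattern, not GATK's SparkUtils: key by (contig index, start), sort globally.
    // Unmapped reads are ignored here for brevity.
    JavaPairRDD<Long, GATKRead> keyed = reads.mapToPair(r -> new Tuple2<>(
            (((long) header.getSequenceIndex(r.getContig())) << 32) | (r.getStart() & 0xffffffffL), r));
    JavaRDD<GATKRead> coordinateSorted = keyed.sortByKey().values();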
20:33:15.548 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:15.548 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:15.548 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:15.549 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:15.549 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:15.549 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:15.551 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:15.552 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:15.554 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:15.557 INFO  StateChange - BLOCK* allocate blk_1073741832_1008, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/part-r-00000
20:33:15.559 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741832_1008 src: /127.0.0.1:59746 dest: /127.0.0.1:35765
20:33:15.564 INFO  clienttrace - src: /127.0.0.1:59746, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741832_1008, duration(ns): 3369659
20:33:15.564 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741832_1008, type=LAST_IN_PIPELINE terminating
20:33:15.565 INFO  FSNamesystem - BLOCK* blk_1073741832_1008 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/part-r-00000
20:33:15.966 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:15.967 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:15.968 INFO  StateChange - BLOCK* allocate blk_1073741833_1009, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.sbi
20:33:15.970 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741833_1009 src: /127.0.0.1:59752 dest: /127.0.0.1:35765
20:33:15.971 INFO  clienttrace - src: /127.0.0.1:59752, dest: /127.0.0.1:35765, bytes: 13492, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741833_1009, duration(ns): 621886
20:33:15.971 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741833_1009, type=LAST_IN_PIPELINE terminating
20:33:15.973 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:15.976 INFO  StateChange - BLOCK* allocate blk_1073741834_1010, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.bai
20:33:15.977 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741834_1010 src: /127.0.0.1:59766 dest: /127.0.0.1:35765
20:33:15.978 INFO  clienttrace - src: /127.0.0.1:59766, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741834_1010, duration(ns): 582738
20:33:15.978 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741834_1010, type=LAST_IN_PIPELINE terminating
20:33:15.980 INFO  FSNamesystem - BLOCK* blk_1073741834_1010 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.bai
20:33:16.381 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.383 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0	dst=null	perm=null	proto=rpc
20:33:16.384 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0	dst=null	perm=null	proto=rpc
20:33:16.385 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000	dst=null	perm=null	proto=rpc
20:33:16.386 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/_temporary/attempt_202507152033158067728467864346650_0107_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:16.387 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033158067728467864346650_0107_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000
20:33:16.387 INFO  SparkHadoopMapRedUtil - attempt_202507152033158067728467864346650_0107_r_000000_0: Committed. Elapsed time: 3 ms.
20:33:16.388 INFO  Executor - Finished task 0.0 in stage 35.0 (TID 73). 1858 bytes result sent to driver
20:33:16.390 INFO  TaskSetManager - Finished task 0.0 in stage 35.0 (TID 73) in 875 ms on localhost (executor driver) (1/1)
20:33:16.390 INFO  TaskSchedulerImpl - Removed TaskSet 35.0, whose tasks have all completed, from pool 
20:33:16.390 INFO  DAGScheduler - ResultStage 35 (runJob at SparkHadoopWriter.scala:83) finished in 0.892 s
20:33:16.390 INFO  DAGScheduler - Job 24 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:16.390 INFO  TaskSchedulerImpl - Killing all running tasks in stage 35: Stage finished
20:33:16.390 INFO  DAGScheduler - Job 24 finished: runJob at SparkHadoopWriter.scala:83, took 1.025772 s
20:33:16.392 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033158067728467864346650_0107.
20:33:16.392 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:16.393 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:16.394 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000	dst=null	perm=null	proto=rpc
20:33:16.395 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:16.396 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.397 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:16.398 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.398 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:16.399 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary/0/task_202507152033158067728467864346650_0107_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.400 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:16.401 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.402 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.403 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.spark-staging-107	dst=null	perm=null	proto=rpc
20:33:16.403 INFO  SparkHadoopWriter - Write Job job_202507152033158067728467864346650_0107 committed. Elapsed time: 11 ms.
20:33:16.404 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.406 INFO  StateChange - BLOCK* allocate blk_1073741835_1011, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/header
20:33:16.407 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741835_1011 src: /127.0.0.1:59780 dest: /127.0.0.1:35765
20:33:16.408 INFO  clienttrace - src: /127.0.0.1:59780, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741835_1011, duration(ns): 638778
20:33:16.409 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741835_1011, type=LAST_IN_PIPELINE terminating
20:33:16.410 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.411 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.412 INFO  StateChange - BLOCK* allocate blk_1073741836_1012, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/terminator
20:33:16.413 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741836_1012 src: /127.0.0.1:59792 dest: /127.0.0.1:35765
20:33:16.414 INFO  clienttrace - src: /127.0.0.1:59792, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741836_1012, duration(ns): 498100
20:33:16.414 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741836_1012, type=LAST_IN_PIPELINE terminating
20:33:16.415 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.416 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:16.418 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.419 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.419 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam
20:33:16.420 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.421 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.422 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.422 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam done
20:33:16.422 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.423 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi
20:33:16.423 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:16.424 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.425 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:16.426 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:16.429 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:16.430 INFO  StateChange - BLOCK* allocate blk_1073741837_1013, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi
20:33:16.431 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741837_1013 src: /127.0.0.1:59794 dest: /127.0.0.1:35765
20:33:16.433 INFO  clienttrace - src: /127.0.0.1:59794, dest: /127.0.0.1:35765, bytes: 13492, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741837_1013, duration(ns): 1345625
20:33:16.433 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741837_1013, type=LAST_IN_PIPELINE terminating
20:33:16.434 INFO  FSNamesystem - BLOCK* blk_1073741837_1013 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi
20:33:16.835 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.836 INFO  IndexFileMerger - Done merging .sbi files
20:33:16.836 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai
20:33:16.837 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:16.838 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:16.839 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:16.840 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:16.842 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:16.843 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:16.850 INFO  StateChange - BLOCK* allocate blk_1073741838_1014, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai
20:33:16.851 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741838_1014 src: /127.0.0.1:59804 dest: /127.0.0.1:35765
20:33:16.853 INFO  clienttrace - src: /127.0.0.1:59804, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741838_1014, duration(ns): 868039
20:33:16.853 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741838_1014, type=LAST_IN_PIPELINE terminating
20:33:16.854 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:16.855 INFO  IndexFileMerger - Done merging .bai files
20:33:16.856 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.parts	dst=null	perm=null	proto=rpc
20:33:16.866 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.874 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.875 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.875 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.877 WARN  DFSUtil - Unexpected value for data transfer bytes=13600 duration=0
20:33:16.878 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.878 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.879 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.880 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.882 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.883 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.884 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.886 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:16.890 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:16.890 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:16.891 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:16.891 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.892 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.893 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.sbi	dst=null	perm=null	proto=rpc
20:33:16.894 WARN  DFSUtil - Unexpected value for data transfer bytes=13600 duration=0
20:33:16.895 INFO  MemoryStore - Block broadcast_51 stored as values in memory (estimated size 13.3 KiB, free 1916.6 MiB)
20:33:16.896 INFO  MemoryStore - Block broadcast_51_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1916.5 MiB)
20:33:16.896 INFO  BlockManagerInfo - Added broadcast_51_piece0 in memory on localhost:45281 (size: 8.3 KiB, free: 1919.3 MiB)
20:33:16.897 INFO  SparkContext - Created broadcast 51 from broadcast at BamSource.java:104
20:33:16.899 INFO  MemoryStore - Block broadcast_52 stored as values in memory (estimated size 297.9 KiB, free 1916.3 MiB)
20:33:16.909 INFO  MemoryStore - Block broadcast_52_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.2 MiB)
20:33:16.909 INFO  BlockManagerInfo - Added broadcast_52_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:16.909 INFO  SparkContext - Created broadcast 52 from newAPIHadoopFile at PathSplitSource.java:96
20:33:16.922 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.922 INFO  FileInputFormat - Total input files to process : 1
20:33:16.923 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.951 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:16.952 INFO  DAGScheduler - Got job 25 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:16.952 INFO  DAGScheduler - Final stage: ResultStage 36 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:16.952 INFO  DAGScheduler - Parents of final stage: List()
20:33:16.952 INFO  DAGScheduler - Missing parents: List()
20:33:16.952 INFO  DAGScheduler - Submitting ResultStage 36 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:16.960 INFO  MemoryStore - Block broadcast_53 stored as values in memory (estimated size 148.2 KiB, free 1916.1 MiB)
20:33:16.961 INFO  MemoryStore - Block broadcast_53_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1916.0 MiB)
20:33:16.961 INFO  BlockManagerInfo - Added broadcast_53_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.2 MiB)
20:33:16.962 INFO  SparkContext - Created broadcast 53 from broadcast at DAGScheduler.scala:1580
20:33:16.962 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 36 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:16.962 INFO  TaskSchedulerImpl - Adding task set 36.0 with 1 tasks resource profile 0
20:33:16.963 INFO  TaskSetManager - Starting task 0.0 in stage 36.0 (TID 74) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:16.963 INFO  Executor - Running task 0.0 in stage 36.0 (TID 74)
20:33:16.977 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam:0+237038
20:33:16.978 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.979 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:16.981 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.981 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.982 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:16.985 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:16.988 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:16.991 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:16.995 INFO  Executor - Finished task 0.0 in stage 36.0 (TID 74). 651526 bytes result sent to driver
20:33:16.998 INFO  TaskSetManager - Finished task 0.0 in stage 36.0 (TID 74) in 36 ms on localhost (executor driver) (1/1)
20:33:16.998 INFO  TaskSchedulerImpl - Removed TaskSet 36.0, whose tasks have all completed, from pool 
20:33:16.998 INFO  DAGScheduler - ResultStage 36 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.045 s
20:33:16.998 INFO  DAGScheduler - Job 25 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:16.998 INFO  TaskSchedulerImpl - Killing all running tasks in stage 36: Stage finished
20:33:16.998 INFO  DAGScheduler - Job 25 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.046820 s
20:33:17.016 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:17.016 INFO  DAGScheduler - Got job 26 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:17.016 INFO  DAGScheduler - Final stage: ResultStage 37 (count at ReadsSparkSinkUnitTest.java:185)
20:33:17.016 INFO  DAGScheduler - Parents of final stage: List()
20:33:17.016 INFO  DAGScheduler - Missing parents: List()
20:33:17.016 INFO  DAGScheduler - Submitting ResultStage 37 (MapPartitionsRDD[95] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:17.035 INFO  MemoryStore - Block broadcast_54 stored as values in memory (estimated size 426.1 KiB, free 1915.6 MiB)
20:33:17.037 INFO  MemoryStore - Block broadcast_54_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.4 MiB)
20:33:17.037 INFO  BlockManagerInfo - Added broadcast_54_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.1 MiB)
20:33:17.037 INFO  SparkContext - Created broadcast 54 from broadcast at DAGScheduler.scala:1580
20:33:17.038 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 37 (MapPartitionsRDD[95] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:17.038 INFO  TaskSchedulerImpl - Adding task set 37.0 with 1 tasks resource profile 0
20:33:17.039 INFO  TaskSetManager - Starting task 0.0 in stage 37.0 (TID 75) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:17.039 INFO  Executor - Running task 0.0 in stage 37.0 (TID 75)
20:33:17.077 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:17.091 INFO  Executor - Finished task 0.0 in stage 37.0 (TID 75). 989 bytes result sent to driver
20:33:17.091 INFO  TaskSetManager - Finished task 0.0 in stage 37.0 (TID 75) in 53 ms on localhost (executor driver) (1/1)
20:33:17.091 INFO  TaskSchedulerImpl - Removed TaskSet 37.0, whose tasks have all completed, from pool 
20:33:17.092 INFO  DAGScheduler - ResultStage 37 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.075 s
20:33:17.092 INFO  DAGScheduler - Job 26 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:17.092 INFO  TaskSchedulerImpl - Killing all running tasks in stage 37: Stage finished
20:33:17.092 INFO  DAGScheduler - Job 26 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.076723 s
20:33:17.097 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:17.098 INFO  DAGScheduler - Got job 27 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:17.098 INFO  DAGScheduler - Final stage: ResultStage 38 (count at ReadsSparkSinkUnitTest.java:185)
20:33:17.098 INFO  DAGScheduler - Parents of final stage: List()
20:33:17.098 INFO  DAGScheduler - Missing parents: List()
20:33:17.098 INFO  DAGScheduler - Submitting ResultStage 38 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:17.105 INFO  MemoryStore - Block broadcast_55 stored as values in memory (estimated size 148.1 KiB, free 1915.3 MiB)
20:33:17.117 INFO  MemoryStore - Block broadcast_55_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1915.2 MiB)
20:33:17.118 INFO  BlockManagerInfo - Added broadcast_55_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.0 MiB)
20:33:17.118 INFO  BlockManagerInfo - Removed broadcast_49_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.2 MiB)
20:33:17.118 INFO  SparkContext - Created broadcast 55 from broadcast at DAGScheduler.scala:1580
20:33:17.119 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 38 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:17.119 INFO  TaskSchedulerImpl - Adding task set 38.0 with 1 tasks resource profile 0
20:33:17.119 INFO  BlockManagerInfo - Removed broadcast_34_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:17.120 INFO  TaskSetManager - Starting task 0.0 in stage 38.0 (TID 76) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:17.121 INFO  Executor - Running task 0.0 in stage 38.0 (TID 76)
20:33:17.122 INFO  BlockManagerInfo - Removed broadcast_47_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:17.123 INFO  BlockManagerInfo - Removed broadcast_44_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.3 MiB)
20:33:17.124 INFO  BlockManagerInfo - Removed broadcast_54_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.5 MiB)
20:33:17.125 INFO  BlockManagerInfo - Removed broadcast_46_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:17.126 INFO  BlockManagerInfo - Removed broadcast_50_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.6 MiB)
20:33:17.126 INFO  BlockManagerInfo - Removed broadcast_41_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:17.127 INFO  BlockManagerInfo - Removed broadcast_43_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:17.128 INFO  BlockManagerInfo - Removed broadcast_40_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.8 MiB)
20:33:17.130 INFO  BlockManagerInfo - Removed broadcast_48_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:17.130 INFO  BlockManagerInfo - Removed broadcast_53_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.8 MiB)
20:33:17.139 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam:0+237038
20:33:17.141 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:17.142 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam	dst=null	perm=null	proto=rpc
20:33:17.143 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:17.144 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:17.144 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_0e1f89f2-c115-479d-baf1-4e39541de772.bam.bai	dst=null	perm=null	proto=rpc
20:33:17.150 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:17.150 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:17.157 INFO  Executor - Finished task 0.0 in stage 38.0 (TID 76). 989 bytes result sent to driver
20:33:17.157 INFO  TaskSetManager - Finished task 0.0 in stage 38.0 (TID 76) in 37 ms on localhost (executor driver) (1/1)
20:33:17.157 INFO  TaskSchedulerImpl - Removed TaskSet 38.0, whose tasks have all completed, from pool 
20:33:17.158 INFO  DAGScheduler - ResultStage 38 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:17.158 INFO  DAGScheduler - Job 27 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:17.158 INFO  TaskSchedulerImpl - Killing all running tasks in stage 38: Stage finished
20:33:17.158 INFO  DAGScheduler - Job 27 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.060734 s
20:33:17.163 INFO  MemoryStore - Block broadcast_56 stored as values in memory (estimated size 297.9 KiB, free 1918.8 MiB)
20:33:17.174 INFO  MemoryStore - Block broadcast_56_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.8 MiB)
20:33:17.174 INFO  BlockManagerInfo - Added broadcast_56_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:17.175 INFO  SparkContext - Created broadcast 56 from newAPIHadoopFile at PathSplitSource.java:96
20:33:17.201 INFO  MemoryStore - Block broadcast_57 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:17.207 INFO  MemoryStore - Block broadcast_57_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:17.208 INFO  BlockManagerInfo - Added broadcast_57_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:17.208 INFO  SparkContext - Created broadcast 57 from newAPIHadoopFile at PathSplitSource.java:96
20:33:17.230 INFO  FileInputFormat - Total input files to process : 1
20:33:17.234 INFO  MemoryStore - Block broadcast_58 stored as values in memory (estimated size 160.7 KiB, free 1918.3 MiB)
20:33:17.235 INFO  MemoryStore - Block broadcast_58_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.3 MiB)
20:33:17.235 INFO  BlockManagerInfo - Added broadcast_58_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:17.236 INFO  SparkContext - Created broadcast 58 from broadcast at ReadsSparkSink.java:133
20:33:17.236 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:17.238 INFO  MemoryStore - Block broadcast_59 stored as values in memory (estimated size 163.2 KiB, free 1918.1 MiB)
20:33:17.240 INFO  MemoryStore - Block broadcast_59_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:17.240 INFO  BlockManagerInfo - Added broadcast_59_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:17.241 INFO  SparkContext - Created broadcast 59 from broadcast at BamSink.java:76
20:33:17.244 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts	dst=null	perm=null	proto=rpc
20:33:17.245 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:17.245 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:17.245 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:17.246 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:17.257 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:17.258 INFO  DAGScheduler - Registering RDD 127 (mapToPair at SparkUtils.java:161) as input to shuffle 9
20:33:17.259 INFO  DAGScheduler - Got job 28 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:17.259 INFO  DAGScheduler - Final stage: ResultStage 40 (runJob at SparkHadoopWriter.scala:83)
20:33:17.259 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 39)
20:33:17.259 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 39)
20:33:17.259 INFO  DAGScheduler - Submitting ShuffleMapStage 39 (MapPartitionsRDD[127] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:17.279 INFO  MemoryStore - Block broadcast_60 stored as values in memory (estimated size 520.4 KiB, free 1917.6 MiB)
20:33:17.281 INFO  MemoryStore - Block broadcast_60_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.4 MiB)
20:33:17.281 INFO  BlockManagerInfo - Added broadcast_60_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.6 MiB)
20:33:17.282 INFO  SparkContext - Created broadcast 60 from broadcast at DAGScheduler.scala:1580
20:33:17.282 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 39 (MapPartitionsRDD[127] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:17.282 INFO  TaskSchedulerImpl - Adding task set 39.0 with 1 tasks resource profile 0
20:33:17.283 INFO  TaskSetManager - Starting task 0.0 in stage 39.0 (TID 77) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:17.283 INFO  Executor - Running task 0.0 in stage 39.0 (TID 77)
20:33:17.323 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:17.346 INFO  Executor - Finished task 0.0 in stage 39.0 (TID 77). 1148 bytes result sent to driver
20:33:17.347 INFO  TaskSetManager - Finished task 0.0 in stage 39.0 (TID 77) in 64 ms on localhost (executor driver) (1/1)
20:33:17.347 INFO  TaskSchedulerImpl - Removed TaskSet 39.0, whose tasks have all completed, from pool 
20:33:17.347 INFO  DAGScheduler - ShuffleMapStage 39 (mapToPair at SparkUtils.java:161) finished in 0.087 s
20:33:17.347 INFO  DAGScheduler - looking for newly runnable stages
20:33:17.347 INFO  DAGScheduler - running: HashSet()
20:33:17.347 INFO  DAGScheduler - waiting: HashSet(ResultStage 40)
20:33:17.347 INFO  DAGScheduler - failed: HashSet()
20:33:17.348 INFO  DAGScheduler - Submitting ResultStage 40 (MapPartitionsRDD[132] at mapToPair at BamSink.java:91), which has no missing parents
20:33:17.356 INFO  MemoryStore - Block broadcast_61 stored as values in memory (estimated size 241.5 KiB, free 1917.2 MiB)
20:33:17.357 INFO  MemoryStore - Block broadcast_61_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1917.1 MiB)
20:33:17.357 INFO  BlockManagerInfo - Added broadcast_61_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.5 MiB)
20:33:17.358 INFO  SparkContext - Created broadcast 61 from broadcast at DAGScheduler.scala:1580
20:33:17.358 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 40 (MapPartitionsRDD[132] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:17.358 INFO  TaskSchedulerImpl - Adding task set 40.0 with 1 tasks resource profile 0
20:33:17.358 INFO  TaskSetManager - Starting task 0.0 in stage 40.0 (TID 78) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:17.359 INFO  Executor - Running task 0.0 in stage 40.0 (TID 78)
20:33:17.366 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:17.367 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:17.385 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:17.385 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:17.385 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:17.385 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:17.385 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:17.385 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:17.387 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:17.388 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:17.392 INFO  StateChange - BLOCK* allocate blk_1073741839_1015, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/part-r-00000
20:33:17.394 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741839_1015 src: /127.0.0.1:43744 dest: /127.0.0.1:35765
20:33:17.397 INFO  clienttrace - src: /127.0.0.1:43744, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741839_1015, duration(ns): 2747413
20:33:17.397 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741839_1015, type=LAST_IN_PIPELINE terminating
20:33:17.398 INFO  FSNamesystem - BLOCK* blk_1073741839_1015 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/part-r-00000
20:33:17.799 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:17.800 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:17.803 INFO  StateChange - BLOCK* allocate blk_1073741840_1016, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/.part-r-00000.bai
20:33:17.804 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741840_1016 src: /127.0.0.1:43756 dest: /127.0.0.1:35765
20:33:17.806 INFO  clienttrace - src: /127.0.0.1:43756, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741840_1016, duration(ns): 710575
20:33:17.806 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741840_1016, type=LAST_IN_PIPELINE terminating
20:33:17.807 INFO  FSNamesystem - BLOCK* blk_1073741840_1016 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/.part-r-00000.bai
20:33:18.208 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:18.209 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0	dst=null	perm=null	proto=rpc
20:33:18.210 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0	dst=null	perm=null	proto=rpc
20:33:18.211 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000	dst=null	perm=null	proto=rpc
20:33:18.212 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/_temporary/attempt_202507152033175735561367774544019_0132_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:18.212 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033175735561367774544019_0132_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000
20:33:18.212 INFO  SparkHadoopMapRedUtil - attempt_202507152033175735561367774544019_0132_r_000000_0: Committed. Elapsed time: 2 ms.
20:33:18.213 INFO  Executor - Finished task 0.0 in stage 40.0 (TID 78). 1858 bytes result sent to driver
20:33:18.215 INFO  TaskSetManager - Finished task 0.0 in stage 40.0 (TID 78) in 857 ms on localhost (executor driver) (1/1)
20:33:18.215 INFO  TaskSchedulerImpl - Removed TaskSet 40.0, whose tasks have all completed, from pool 
20:33:18.215 INFO  DAGScheduler - ResultStage 40 (runJob at SparkHadoopWriter.scala:83) finished in 0.867 s
20:33:18.215 INFO  DAGScheduler - Job 28 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:18.215 INFO  TaskSchedulerImpl - Killing all running tasks in stage 40: Stage finished
20:33:18.216 INFO  DAGScheduler - Job 28 finished: runJob at SparkHadoopWriter.scala:83, took 0.958058 s
20:33:18.217 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033175735561367774544019_0132.
20:33:18.217 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:18.218 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts	dst=null	perm=null	proto=rpc
20:33:18.219 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000	dst=null	perm=null	proto=rpc
20:33:18.219 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:18.220 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.221 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:18.222 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary/0/task_202507152033175735561367774544019_0132_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.222 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:18.223 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.224 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:18.226 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.spark-staging-132	dst=null	perm=null	proto=rpc
20:33:18.226 INFO  SparkHadoopWriter - Write Job job_202507152033175735561367774544019_0132 committed. Elapsed time: 9 ms.
20:33:18.226 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.229 INFO  StateChange - BLOCK* allocate blk_1073741841_1017, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/header
20:33:18.230 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741841_1017 src: /127.0.0.1:43766 dest: /127.0.0.1:35765
20:33:18.232 INFO  clienttrace - src: /127.0.0.1:43766, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741841_1017, duration(ns): 802101
20:33:18.232 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741841_1017, type=LAST_IN_PIPELINE terminating
20:33:18.233 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:18.234 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.235 INFO  StateChange - BLOCK* allocate blk_1073741842_1018, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/terminator
20:33:18.236 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741842_1018 src: /127.0.0.1:43772 dest: /127.0.0.1:35765
20:33:18.237 INFO  clienttrace - src: /127.0.0.1:43772, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741842_1018, duration(ns): 479254
20:33:18.237 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741842_1018, type=LAST_IN_PIPELINE terminating
20:33:18.238 INFO  FSNamesystem - BLOCK* blk_1073741842_1018 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/terminator
20:33:18.393 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741833_1009 replica FinalizedReplica, blk_1073741833_1009, FINALIZED
  getNumBytes()     = 13492
  getBytesOnDisk()  = 13492
  getVisibleLength()= 13492
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741833 for deletion
20:33:18.393 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741834_1010 replica FinalizedReplica, blk_1073741834_1010, FINALIZED
  getNumBytes()     = 5472
  getBytesOnDisk()  = 5472
  getVisibleLength()= 5472
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741834 for deletion
20:33:18.393 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741833_1009 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741833
20:33:18.393 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741834_1010 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741834
20:33:18.639 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:18.640 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts	dst=null	perm=null	proto=rpc
20:33:18.641 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.642 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:18.643 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam
20:33:18.643 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.644 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:18.644 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.645 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam done
20:33:18.645 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:18.645 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai
20:33:18.646 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts	dst=null	perm=null	proto=rpc
20:33:18.647 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:18.648 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:18.649 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:18.652 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:18.654 INFO  StateChange - BLOCK* allocate blk_1073741843_1019, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai
20:33:18.655 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741843_1019 src: /127.0.0.1:43776 dest: /127.0.0.1:35765
20:33:18.657 INFO  clienttrace - src: /127.0.0.1:43776, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741843_1019, duration(ns): 606742
20:33:18.657 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741843_1019, type=LAST_IN_PIPELINE terminating
20:33:18.658 INFO  FSNamesystem - BLOCK* blk_1073741843_1019 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai
20:33:19.059 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:19.059 INFO  IndexFileMerger - Done merging .bai files
20:33:19.060 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.parts	dst=null	perm=null	proto=rpc
20:33:19.070 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.071 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.071 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.072 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.073 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.075 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.075 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.079 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.082 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.085 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.087 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.087 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.088 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.sbi	dst=null	perm=null	proto=rpc
20:33:19.091 INFO  MemoryStore - Block broadcast_62 stored as values in memory (estimated size 297.9 KiB, free 1916.8 MiB)
20:33:19.102 INFO  MemoryStore - Block broadcast_62_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.8 MiB)
20:33:19.103 INFO  BlockManagerInfo - Added broadcast_62_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:19.103 INFO  SparkContext - Created broadcast 62 from newAPIHadoopFile at PathSplitSource.java:96
20:33:19.126 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.127 INFO  FileInputFormat - Total input files to process : 1
20:33:19.127 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.165 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:19.165 INFO  DAGScheduler - Got job 29 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:19.165 INFO  DAGScheduler - Final stage: ResultStage 41 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:19.165 INFO  DAGScheduler - Parents of final stage: List()
20:33:19.165 INFO  DAGScheduler - Missing parents: List()
20:33:19.165 INFO  DAGScheduler - Submitting ResultStage 41 (MapPartitionsRDD[139] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:19.185 INFO  MemoryStore - Block broadcast_63 stored as values in memory (estimated size 426.2 KiB, free 1916.4 MiB)
20:33:19.186 INFO  MemoryStore - Block broadcast_63_piece0 stored as bytes in memory (estimated size 153.7 KiB, free 1916.2 MiB)
20:33:19.186 INFO  BlockManagerInfo - Added broadcast_63_piece0 in memory on localhost:45281 (size: 153.7 KiB, free: 1919.3 MiB)
20:33:19.187 INFO  SparkContext - Created broadcast 63 from broadcast at DAGScheduler.scala:1580
20:33:19.187 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 41 (MapPartitionsRDD[139] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:19.187 INFO  TaskSchedulerImpl - Adding task set 41.0 with 1 tasks resource profile 0
20:33:19.188 INFO  TaskSetManager - Starting task 0.0 in stage 41.0 (TID 79) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:19.189 INFO  Executor - Running task 0.0 in stage 41.0 (TID 79)
20:33:19.233 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam:0+237038
20:33:19.234 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.235 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.237 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.238 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.238 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.239 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.240 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.241 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.243 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.246 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.247 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.248 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.249 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.250 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.250 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.256 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.258 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.261 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.261 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.262 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.263 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.264 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.266 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.267 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.269 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.269 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.270 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.273 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.274 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.275 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.276 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.277 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.278 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.279 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.281 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.282 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.283 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.286 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.287 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.288 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.291 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.291 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.292 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.293 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.294 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.295 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.296 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.297 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.298 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.299 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.300 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.301 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.305 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.306 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.307 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.308 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.310 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.313 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.314 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.315 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.315 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.316 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.317 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.319 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.320 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.322 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.324 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.325 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.326 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.326 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.327 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.329 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.332 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.333 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.334 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.335 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:19.338 INFO  Executor - Finished task 0.0 in stage 41.0 (TID 79). 651526 bytes result sent to driver
20:33:19.342 INFO  TaskSetManager - Finished task 0.0 in stage 41.0 (TID 79) in 154 ms on localhost (executor driver) (1/1)
20:33:19.342 INFO  TaskSchedulerImpl - Removed TaskSet 41.0, whose tasks have all completed, from pool 
20:33:19.343 INFO  DAGScheduler - ResultStage 41 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.177 s
20:33:19.343 INFO  DAGScheduler - Job 29 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:19.343 INFO  TaskSchedulerImpl - Killing all running tasks in stage 41: Stage finished
20:33:19.343 INFO  DAGScheduler - Job 29 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.178321 s
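
Jobs 29-31 (collect at ReadsSparkSinkUnitTest.java:182, count at ReadsSparkSinkUnitTest.java:185) are the read-back verification of the BAM written to HDFS earlier in this test: the round-tripped reads are collected and their count is compared against the original input. A minimal sketch of that pattern with plain Spark Java APIs, using text files as stand-ins for the BAM-backed RDDs (the GATK-specific read sources and the exact assertions are assumptions here, not copied from the test):

import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

// Sketch only: args[0] stands in for the original input, args[1] for the re-read output.
public class RoundTripCheck {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("RoundTripCheck").setMaster("local[1]");
        try (JavaSparkContext ctx = new JavaSparkContext(conf)) {
            JavaRDD<String> original = ctx.textFile(args[0]);      // stand-in for the input reads
            JavaRDD<String> roundTripped = ctx.textFile(args[1]);  // stand-in for the re-read output

            List<String> a = original.collect();                   // mirrors "collect at ...:182"
            List<String> b = roundTripped.collect();
            if (!a.equals(b)) {
                throw new AssertionError("round-tripped records differ from the originals");
            }
            if (original.count() != roundTripped.count()) {        // mirrors "count at ...:185"
                throw new AssertionError("record counts differ");
            }
        }
    }
}
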
20:33:19.363 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:19.363 INFO  DAGScheduler - Got job 30 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:19.363 INFO  DAGScheduler - Final stage: ResultStage 42 (count at ReadsSparkSinkUnitTest.java:185)
20:33:19.363 INFO  DAGScheduler - Parents of final stage: List()
20:33:19.364 INFO  DAGScheduler - Missing parents: List()
20:33:19.364 INFO  DAGScheduler - Submitting ResultStage 42 (MapPartitionsRDD[120] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:19.380 INFO  BlockManagerInfo - Removed broadcast_59_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:19.380 INFO  BlockManagerInfo - Removed broadcast_58_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:19.382 INFO  BlockManagerInfo - Removed broadcast_60_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:19.383 INFO  BlockManagerInfo - Removed broadcast_61_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.5 MiB)
20:33:19.384 INFO  BlockManagerInfo - Removed broadcast_57_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:19.385 INFO  BlockManagerInfo - Removed broadcast_51_piece0 on localhost:45281 in memory (size: 8.3 KiB, free: 1919.6 MiB)
20:33:19.387 INFO  BlockManagerInfo - Removed broadcast_45_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:19.389 INFO  BlockManagerInfo - Removed broadcast_63_piece0 on localhost:45281 in memory (size: 153.7 KiB, free: 1919.8 MiB)
20:33:19.390 INFO  BlockManagerInfo - Removed broadcast_52_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:19.391 INFO  BlockManagerInfo - Removed broadcast_55_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.9 MiB)
20:33:19.399 INFO  MemoryStore - Block broadcast_64 stored as values in memory (estimated size 426.1 KiB, free 1918.9 MiB)
20:33:19.401 INFO  MemoryStore - Block broadcast_64_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.8 MiB)
20:33:19.401 INFO  BlockManagerInfo - Added broadcast_64_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.8 MiB)
20:33:19.401 INFO  SparkContext - Created broadcast 64 from broadcast at DAGScheduler.scala:1580
20:33:19.401 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 42 (MapPartitionsRDD[120] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:19.401 INFO  TaskSchedulerImpl - Adding task set 42.0 with 1 tasks resource profile 0
20:33:19.402 INFO  TaskSetManager - Starting task 0.0 in stage 42.0 (TID 80) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:19.403 INFO  Executor - Running task 0.0 in stage 42.0 (TID 80)
20:33:19.441 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:19.454 INFO  Executor - Finished task 0.0 in stage 42.0 (TID 80). 989 bytes result sent to driver
20:33:19.455 INFO  TaskSetManager - Finished task 0.0 in stage 42.0 (TID 80) in 53 ms on localhost (executor driver) (1/1)
20:33:19.455 INFO  TaskSchedulerImpl - Removed TaskSet 42.0, whose tasks have all completed, from pool 
20:33:19.455 INFO  DAGScheduler - ResultStage 42 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.090 s
20:33:19.456 INFO  DAGScheduler - Job 30 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:19.456 INFO  TaskSchedulerImpl - Killing all running tasks in stage 42: Stage finished
20:33:19.456 INFO  DAGScheduler - Job 30 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.092723 s
20:33:19.461 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:19.461 INFO  DAGScheduler - Got job 31 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:19.461 INFO  DAGScheduler - Final stage: ResultStage 43 (count at ReadsSparkSinkUnitTest.java:185)
20:33:19.461 INFO  DAGScheduler - Parents of final stage: List()
20:33:19.461 INFO  DAGScheduler - Missing parents: List()
20:33:19.461 INFO  DAGScheduler - Submitting ResultStage 43 (MapPartitionsRDD[139] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:19.493 INFO  MemoryStore - Block broadcast_65 stored as values in memory (estimated size 426.1 KiB, free 1918.3 MiB)
20:33:19.495 INFO  MemoryStore - Block broadcast_65_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.2 MiB)
20:33:19.495 INFO  BlockManagerInfo - Added broadcast_65_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:19.495 INFO  SparkContext - Created broadcast 65 from broadcast at DAGScheduler.scala:1580
20:33:19.496 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 43 (MapPartitionsRDD[139] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:19.496 INFO  TaskSchedulerImpl - Adding task set 43.0 with 1 tasks resource profile 0
20:33:19.497 INFO  TaskSetManager - Starting task 0.0 in stage 43.0 (TID 81) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:19.497 INFO  Executor - Running task 0.0 in stage 43.0 (TID 81)
20:33:19.532 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam:0+237038
20:33:19.533 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.534 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.536 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.537 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.537 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.539 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.539 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.540 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.542 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.545 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.546 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.547 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.548 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.549 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.554 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.554 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.555 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.556 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.557 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.558 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.559 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.559 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.560 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.561 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.563 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.564 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.565 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.566 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.567 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.569 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.569 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.570 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.571 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.572 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.573 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.574 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.575 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.575 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.577 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.578 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.579 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.580 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.581 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.582 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.584 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.584 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.586 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.587 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.587 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.588 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.589 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.590 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.591 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.593 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.595 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.597 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.598 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.599 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.600 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.602 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.603 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.603 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.604 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.605 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.607 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.608 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.609 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.609 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.610 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.611 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.612 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.613 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:19.614 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.615 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.615 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.616 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam	dst=null	perm=null	proto=rpc
20:33:19.618 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.618 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.619 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_f5f3986a-343b-4c50-bdfd-2ee95f7d5310.bam.bai	dst=null	perm=null	proto=rpc
20:33:19.621 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:19.623 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.624 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:19.625 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:19.626 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:19.628 INFO  Executor - Finished task 0.0 in stage 43.0 (TID 81). 989 bytes result sent to driver
20:33:19.628 INFO  TaskSetManager - Finished task 0.0 in stage 43.0 (TID 81) in 132 ms on localhost (executor driver) (1/1)
20:33:19.629 INFO  TaskSchedulerImpl - Removed TaskSet 43.0, whose tasks have all completed, from pool 
20:33:19.629 INFO  DAGScheduler - ResultStage 43 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.167 s
20:33:19.629 INFO  DAGScheduler - Job 31 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:19.629 INFO  TaskSchedulerImpl - Killing all running tasks in stage 43: Stage finished
20:33:19.629 INFO  DAGScheduler - Job 31 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.168320 s
20:33:19.633 INFO  MemoryStore - Block broadcast_66 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:19.639 INFO  MemoryStore - Block broadcast_66_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.8 MiB)
20:33:19.639 INFO  BlockManagerInfo - Added broadcast_66_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:19.640 INFO  SparkContext - Created broadcast 66 from newAPIHadoopFile at PathSplitSource.java:96
20:33:19.664 INFO  MemoryStore - Block broadcast_67 stored as values in memory (estimated size 297.9 KiB, free 1917.6 MiB)
20:33:19.670 INFO  MemoryStore - Block broadcast_67_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.5 MiB)
20:33:19.670 INFO  BlockManagerInfo - Added broadcast_67_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:19.670 INFO  SparkContext - Created broadcast 67 from newAPIHadoopFile at PathSplitSource.java:96
20:33:19.693 INFO  FileInputFormat - Total input files to process : 1
20:33:19.696 INFO  MemoryStore - Block broadcast_68 stored as values in memory (estimated size 160.7 KiB, free 1917.4 MiB)
20:33:19.697 INFO  MemoryStore - Block broadcast_68_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:19.698 INFO  BlockManagerInfo - Added broadcast_68_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:19.698 INFO  SparkContext - Created broadcast 68 from broadcast at ReadsSparkSink.java:133
20:33:19.699 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:19.701 INFO  MemoryStore - Block broadcast_69 stored as values in memory (estimated size 163.2 KiB, free 1917.2 MiB)
20:33:19.702 INFO  MemoryStore - Block broadcast_69_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.2 MiB)
20:33:19.702 INFO  BlockManagerInfo - Added broadcast_69_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:19.702 INFO  SparkContext - Created broadcast 69 from broadcast at BamSink.java:76
20:33:19.705 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts	dst=null	perm=null	proto=rpc
20:33:19.706 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:19.706 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:19.706 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:19.707 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:19.713 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:19.714 INFO  DAGScheduler - Registering RDD 153 (mapToPair at SparkUtils.java:161) as input to shuffle 10
20:33:19.714 INFO  DAGScheduler - Got job 32 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:19.714 INFO  DAGScheduler - Final stage: ResultStage 45 (runJob at SparkHadoopWriter.scala:83)
20:33:19.714 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 44)
20:33:19.715 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 44)
20:33:19.715 INFO  DAGScheduler - Submitting ShuffleMapStage 44 (MapPartitionsRDD[153] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:19.733 INFO  MemoryStore - Block broadcast_70 stored as values in memory (estimated size 520.4 KiB, free 1916.7 MiB)
20:33:19.734 INFO  MemoryStore - Block broadcast_70_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.5 MiB)
20:33:19.735 INFO  BlockManagerInfo - Added broadcast_70_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.3 MiB)
20:33:19.735 INFO  SparkContext - Created broadcast 70 from broadcast at DAGScheduler.scala:1580
20:33:19.735 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 44 (MapPartitionsRDD[153] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:19.735 INFO  TaskSchedulerImpl - Adding task set 44.0 with 1 tasks resource profile 0
20:33:19.736 INFO  TaskSetManager - Starting task 0.0 in stage 44.0 (TID 82) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:19.736 INFO  Executor - Running task 0.0 in stage 44.0 (TID 82)
20:33:19.771 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:19.791 INFO  Executor - Finished task 0.0 in stage 44.0 (TID 82). 1148 bytes result sent to driver
20:33:19.791 INFO  TaskSetManager - Finished task 0.0 in stage 44.0 (TID 82) in 55 ms on localhost (executor driver) (1/1)
20:33:19.792 INFO  TaskSchedulerImpl - Removed TaskSet 44.0, whose tasks have all completed, from pool 
20:33:19.792 INFO  DAGScheduler - ShuffleMapStage 44 (mapToPair at SparkUtils.java:161) finished in 0.077 s
20:33:19.792 INFO  DAGScheduler - looking for newly runnable stages
20:33:19.792 INFO  DAGScheduler - running: HashSet()
20:33:19.792 INFO  DAGScheduler - waiting: HashSet(ResultStage 45)
20:33:19.792 INFO  DAGScheduler - failed: HashSet()
20:33:19.792 INFO  DAGScheduler - Submitting ResultStage 45 (MapPartitionsRDD[158] at mapToPair at BamSink.java:91), which has no missing parents
20:33:19.799 INFO  MemoryStore - Block broadcast_71 stored as values in memory (estimated size 241.5 KiB, free 1916.3 MiB)
20:33:19.800 INFO  MemoryStore - Block broadcast_71_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.2 MiB)
20:33:19.801 INFO  BlockManagerInfo - Added broadcast_71_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.3 MiB)
20:33:19.801 INFO  SparkContext - Created broadcast 71 from broadcast at DAGScheduler.scala:1580
20:33:19.801 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 45 (MapPartitionsRDD[158] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:19.801 INFO  TaskSchedulerImpl - Adding task set 45.0 with 1 tasks resource profile 0
20:33:19.802 INFO  TaskSetManager - Starting task 0.0 in stage 45.0 (TID 83) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:19.803 INFO  Executor - Running task 0.0 in stage 45.0 (TID 83)
20:33:19.808 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:19.808 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:19.823 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:19.823 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:19.823 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:19.824 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:19.824 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:19.824 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:19.825 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:19.827 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:19.830 INFO  StateChange - BLOCK* allocate blk_1073741844_1020, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/part-r-00000
20:33:19.833 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741844_1020 src: /127.0.0.1:44466 dest: /127.0.0.1:35765
20:33:19.835 INFO  clienttrace - src: /127.0.0.1:44466, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741844_1020, duration(ns): 1691567
20:33:19.835 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741844_1020, type=LAST_IN_PIPELINE terminating
20:33:19.836 INFO  FSNamesystem - BLOCK* blk_1073741844_1020 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/part-r-00000
20:33:20.237 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:20.238 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:20.240 INFO  StateChange - BLOCK* allocate blk_1073741845_1021, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/.part-r-00000.sbi
20:33:20.241 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741845_1021 src: /127.0.0.1:44476 dest: /127.0.0.1:35765
20:33:20.242 INFO  clienttrace - src: /127.0.0.1:44476, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741845_1021, duration(ns): 529986
20:33:20.242 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741845_1021, type=LAST_IN_PIPELINE terminating
20:33:20.243 INFO  FSNamesystem - BLOCK* blk_1073741845_1021 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/.part-r-00000.sbi
20:33:20.644 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:20.645 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0	dst=null	perm=null	proto=rpc
20:33:20.646 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0	dst=null	perm=null	proto=rpc
20:33:20.647 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000	dst=null	perm=null	proto=rpc
20:33:20.648 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/_temporary/attempt_202507152033198685888920610117261_0158_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:20.648 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033198685888920610117261_0158_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000
20:33:20.648 INFO  SparkHadoopMapRedUtil - attempt_202507152033198685888920610117261_0158_r_000000_0: Committed. Elapsed time: 2 ms.
20:33:20.649 INFO  Executor - Finished task 0.0 in stage 45.0 (TID 83). 1858 bytes result sent to driver
20:33:20.650 INFO  TaskSetManager - Finished task 0.0 in stage 45.0 (TID 83) in 848 ms on localhost (executor driver) (1/1)
20:33:20.650 INFO  TaskSchedulerImpl - Removed TaskSet 45.0, whose tasks have all completed, from pool 
20:33:20.650 INFO  DAGScheduler - ResultStage 45 (runJob at SparkHadoopWriter.scala:83) finished in 0.857 s
20:33:20.651 INFO  DAGScheduler - Job 32 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:20.651 INFO  TaskSchedulerImpl - Killing all running tasks in stage 45: Stage finished
20:33:20.651 INFO  DAGScheduler - Job 32 finished: runJob at SparkHadoopWriter.scala:83, took 0.937232 s
20:33:20.652 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033198685888920610117261_0158.
20:33:20.652 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:20.653 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts	dst=null	perm=null	proto=rpc
20:33:20.654 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000	dst=null	perm=null	proto=rpc
20:33:20.655 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:20.656 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:20.657 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:20.657 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary/0/task_202507152033198685888920610117261_0158_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:20.658 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:20.659 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:20.660 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:20.661 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.spark-staging-158	dst=null	perm=null	proto=rpc
20:33:20.661 INFO  SparkHadoopWriter - Write Job job_202507152033198685888920610117261_0158 committed. Elapsed time: 9 ms.
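
The directory shuffle recorded between 20:33:19.825 and 20:33:20.661 is the standard Hadoop FileOutputCommitter v1 protocol: each task attempt writes under _temporary/0/_temporary/attempt_*, the attempt directory is renamed to a task directory on task commit, and the job commit renames the task's files into the .bam.parts directory and drops a _SUCCESS marker. A minimal configuration sketch (the property name is the standard Hadoop key; whether this test sets it explicitly is an assumption):

import org.apache.hadoop.conf.Configuration;

// Commit layout visible in the audit entries above (algorithm version 1):
//   <name>.bam.parts/_temporary/0/_temporary/attempt_*/part-r-00000   task attempt output
//   <name>.bam.parts/_temporary/0/task_*/part-r-00000                 after task commit (rename)
//   <name>.bam.parts/part-r-00000 (+ _SUCCESS)                        after job commit (rename)
public class CommitterConfig {
    public static Configuration v1Committer() {
        Configuration conf = new Configuration();
        // "1" matches the algorithm version the log reports; sketch, not the test's own setup.
        conf.set("mapreduce.fileoutputcommitter.algorithm.version", "1");
        return conf;
    }
}
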
20:33:20.662 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:20.664 INFO  StateChange - BLOCK* allocate blk_1073741846_1022, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/header
20:33:20.665 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741846_1022 src: /127.0.0.1:44486 dest: /127.0.0.1:35765
20:33:20.667 INFO  clienttrace - src: /127.0.0.1:44486, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741846_1022, duration(ns): 607844
20:33:20.667 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741846_1022, type=LAST_IN_PIPELINE terminating
20:33:20.668 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:20.668 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:20.669 INFO  StateChange - BLOCK* allocate blk_1073741847_1023, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/terminator
20:33:20.670 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741847_1023 src: /127.0.0.1:44502 dest: /127.0.0.1:35765
20:33:20.672 INFO  clienttrace - src: /127.0.0.1:44502, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741847_1023, duration(ns): 500475
20:33:20.672 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741847_1023, type=LAST_IN_PIPELINE terminating
20:33:20.673 INFO  FSNamesystem - BLOCK* blk_1073741847_1023 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/terminator
20:33:21.074 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.075 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.077 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.078 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.078 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam
20:33:21.079 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.080 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.081 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.081 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam done
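
The create/concat/rename audit entries above are the single-file assembly step: an empty output file is created inside the .bam.parts directory, the BAM header, the part file(s) and the BGZF terminator are concatenated into it, and the result is renamed to the final .bam. A rough sketch of that sequence with the plain Hadoop FileSystem API (paths and the namenode URI are placeholders, not taken from the test; HDFS imposes its own preconditions on concat, e.g. sources in the same directory and matching block sizes):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ConcatParts {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020"), conf); // placeholder namenode

        Path parts = new Path("/user/runner/example.bam.parts");   // illustrative path
        Path target = new Path(parts, "output");
        fs.create(target).close();                                  // corresponds to cmd=create .../output
        fs.concat(target, new Path[] {                              // corresponds to cmd=concat
                new Path(parts, "header"),
                new Path(parts, "part-r-00000"),
                new Path(parts, "terminator")});
        fs.rename(target, new Path("/user/runner/example.bam"));    // corresponds to cmd=rename to the final BAM
    }
}
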
20:33:21.081 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.082 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi
20:33:21.082 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.083 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.084 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:21.085 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:21.086 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:21.087 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:21.088 INFO  StateChange - BLOCK* allocate blk_1073741848_1024, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi
20:33:21.089 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741848_1024 src: /127.0.0.1:44508 dest: /127.0.0.1:35765
20:33:21.090 INFO  clienttrace - src: /127.0.0.1:44508, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741848_1024, duration(ns): 433552
20:33:21.090 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741848_1024, type=LAST_IN_PIPELINE terminating
20:33:21.092 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.092 INFO  IndexFileMerger - Done merging .sbi files
20:33:21.093 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.102 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.103 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.104 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.105 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:21.106 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.106 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.107 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.108 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.109 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.bai	dst=null	perm=null	proto=rpc
20:33:21.109 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bai	dst=null	perm=null	proto=rpc
20:33:21.111 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:21.113 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.113 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.114 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.115 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:21.115 INFO  MemoryStore - Block broadcast_72 stored as values in memory (estimated size 320.0 B, free 1916.2 MiB)
20:33:21.116 INFO  MemoryStore - Block broadcast_72_piece0 stored as bytes in memory (estimated size 233.0 B, free 1916.2 MiB)
20:33:21.116 INFO  BlockManagerInfo - Added broadcast_72_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.3 MiB)
20:33:21.117 INFO  SparkContext - Created broadcast 72 from broadcast at BamSource.java:104
20:33:21.119 INFO  MemoryStore - Block broadcast_73 stored as values in memory (estimated size 297.9 KiB, free 1915.9 MiB)
20:33:21.125 INFO  MemoryStore - Block broadcast_73_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.9 MiB)
20:33:21.125 INFO  BlockManagerInfo - Added broadcast_73_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:21.126 INFO  SparkContext - Created broadcast 73 from newAPIHadoopFile at PathSplitSource.java:96
20:33:21.137 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.137 INFO  FileInputFormat - Total input files to process : 1
20:33:21.138 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.156 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:21.156 INFO  DAGScheduler - Got job 33 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:21.156 INFO  DAGScheduler - Final stage: ResultStage 46 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:21.156 INFO  DAGScheduler - Parents of final stage: List()
20:33:21.157 INFO  DAGScheduler - Missing parents: List()
20:33:21.157 INFO  DAGScheduler - Submitting ResultStage 46 (MapPartitionsRDD[164] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:21.166 INFO  MemoryStore - Block broadcast_74 stored as values in memory (estimated size 148.2 KiB, free 1915.7 MiB)
20:33:21.167 INFO  MemoryStore - Block broadcast_74_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1915.7 MiB)
20:33:21.167 INFO  BlockManagerInfo - Added broadcast_74_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.2 MiB)
20:33:21.167 INFO  SparkContext - Created broadcast 74 from broadcast at DAGScheduler.scala:1580
20:33:21.168 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 46 (MapPartitionsRDD[164] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:21.168 INFO  TaskSchedulerImpl - Adding task set 46.0 with 1 tasks resource profile 0
20:33:21.168 INFO  TaskSetManager - Starting task 0.0 in stage 46.0 (TID 84) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:21.169 INFO  Executor - Running task 0.0 in stage 46.0 (TID 84)
20:33:21.182 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam:0+237038
20:33:21.183 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.184 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.185 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.bai	dst=null	perm=null	proto=rpc
20:33:21.186 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bai	dst=null	perm=null	proto=rpc
20:33:21.191 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:21.191 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:21.195 INFO  Executor - Finished task 0.0 in stage 46.0 (TID 84). 651526 bytes result sent to driver
20:33:21.197 INFO  TaskSetManager - Finished task 0.0 in stage 46.0 (TID 84) in 29 ms on localhost (executor driver) (1/1)
20:33:21.197 INFO  TaskSchedulerImpl - Removed TaskSet 46.0, whose tasks have all completed, from pool 
20:33:21.197 INFO  DAGScheduler - ResultStage 46 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.040 s
20:33:21.197 INFO  DAGScheduler - Job 33 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:21.197 INFO  TaskSchedulerImpl - Killing all running tasks in stage 46: Stage finished
20:33:21.198 INFO  DAGScheduler - Job 33 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.041897 s
20:33:21.208 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:21.209 INFO  DAGScheduler - Got job 34 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:21.209 INFO  DAGScheduler - Final stage: ResultStage 47 (count at ReadsSparkSinkUnitTest.java:185)
20:33:21.209 INFO  DAGScheduler - Parents of final stage: List()
20:33:21.209 INFO  DAGScheduler - Missing parents: List()
20:33:21.209 INFO  DAGScheduler - Submitting ResultStage 47 (MapPartitionsRDD[146] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:21.236 INFO  MemoryStore - Block broadcast_75 stored as values in memory (estimated size 426.1 KiB, free 1915.2 MiB)
20:33:21.238 INFO  MemoryStore - Block broadcast_75_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.1 MiB)
20:33:21.238 INFO  BlockManagerInfo - Added broadcast_75_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.0 MiB)
20:33:21.239 INFO  SparkContext - Created broadcast 75 from broadcast at DAGScheduler.scala:1580
20:33:21.239 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 47 (MapPartitionsRDD[146] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:21.239 INFO  TaskSchedulerImpl - Adding task set 47.0 with 1 tasks resource profile 0
20:33:21.240 INFO  TaskSetManager - Starting task 0.0 in stage 47.0 (TID 85) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:21.240 INFO  Executor - Running task 0.0 in stage 47.0 (TID 85)
20:33:21.260 INFO  BlockManagerInfo - Removed broadcast_74_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.1 MiB)
20:33:21.261 INFO  BlockManagerInfo - Removed broadcast_68_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.1 MiB)
20:33:21.261 INFO  BlockManagerInfo - Removed broadcast_71_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.1 MiB)
20:33:21.262 INFO  BlockManagerInfo - Removed broadcast_64_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.3 MiB)
20:33:21.263 INFO  BlockManagerInfo - Removed broadcast_67_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:21.264 INFO  BlockManagerInfo - Removed broadcast_70_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:21.265 INFO  BlockManagerInfo - Removed broadcast_62_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:21.266 INFO  BlockManagerInfo - Removed broadcast_56_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:21.267 INFO  BlockManagerInfo - Removed broadcast_65_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:21.267 INFO  BlockManagerInfo - Removed broadcast_69_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:21.287 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:21.299 INFO  Executor - Finished task 0.0 in stage 47.0 (TID 85). 1032 bytes result sent to driver
20:33:21.300 INFO  TaskSetManager - Finished task 0.0 in stage 47.0 (TID 85) in 61 ms on localhost (executor driver) (1/1)
20:33:21.300 INFO  TaskSchedulerImpl - Removed TaskSet 47.0, whose tasks have all completed, from pool 
20:33:21.300 INFO  DAGScheduler - ResultStage 47 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.090 s
20:33:21.300 INFO  DAGScheduler - Job 34 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:21.300 INFO  TaskSchedulerImpl - Killing all running tasks in stage 47: Stage finished
20:33:21.300 INFO  DAGScheduler - Job 34 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.091936 s
20:33:21.304 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:21.304 INFO  DAGScheduler - Got job 35 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:21.304 INFO  DAGScheduler - Final stage: ResultStage 48 (count at ReadsSparkSinkUnitTest.java:185)
20:33:21.304 INFO  DAGScheduler - Parents of final stage: List()
20:33:21.305 INFO  DAGScheduler - Missing parents: List()
20:33:21.305 INFO  DAGScheduler - Submitting ResultStage 48 (MapPartitionsRDD[164] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:21.312 INFO  MemoryStore - Block broadcast_76 stored as values in memory (estimated size 148.1 KiB, free 1918.6 MiB)
20:33:21.312 INFO  MemoryStore - Block broadcast_76_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.6 MiB)
20:33:21.313 INFO  BlockManagerInfo - Added broadcast_76_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.7 MiB)
20:33:21.313 INFO  SparkContext - Created broadcast 76 from broadcast at DAGScheduler.scala:1580
20:33:21.313 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 48 (MapPartitionsRDD[164] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:21.313 INFO  TaskSchedulerImpl - Adding task set 48.0 with 1 tasks resource profile 0
20:33:21.314 INFO  TaskSetManager - Starting task 0.0 in stage 48.0 (TID 86) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:21.315 INFO  Executor - Running task 0.0 in stage 48.0 (TID 86)
20:33:21.332 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam:0+237038
20:33:21.333 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.333 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam	dst=null	perm=null	proto=rpc
20:33:21.335 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bam.bai	dst=null	perm=null	proto=rpc
20:33:21.335 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_769eb61d-afef-4307-99ed-77949f49d212.bai	dst=null	perm=null	proto=rpc
20:33:21.339 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:21.341 INFO  Executor - Finished task 0.0 in stage 48.0 (TID 86). 989 bytes result sent to driver
20:33:21.342 INFO  TaskSetManager - Finished task 0.0 in stage 48.0 (TID 86) in 28 ms on localhost (executor driver) (1/1)
20:33:21.342 INFO  TaskSchedulerImpl - Removed TaskSet 48.0, whose tasks have all completed, from pool 
20:33:21.342 INFO  DAGScheduler - ResultStage 48 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.037 s
20:33:21.342 INFO  DAGScheduler - Job 35 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:21.342 INFO  TaskSchedulerImpl - Killing all running tasks in stage 48: Stage finished
20:33:21.342 INFO  DAGScheduler - Job 35 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.038371 s
20:33:21.346 INFO  MemoryStore - Block broadcast_77 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:21.352 INFO  MemoryStore - Block broadcast_77_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:21.353 INFO  BlockManagerInfo - Added broadcast_77_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:21.353 INFO  SparkContext - Created broadcast 77 from newAPIHadoopFile at PathSplitSource.java:96
20:33:21.377 INFO  MemoryStore - Block broadcast_78 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:21.384 INFO  MemoryStore - Block broadcast_78_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:21.384 INFO  BlockManagerInfo - Added broadcast_78_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:21.384 INFO  SparkContext - Created broadcast 78 from newAPIHadoopFile at PathSplitSource.java:96
20:33:21.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741840_1016 replica FinalizedReplica, blk_1073741840_1016, FINALIZED (getNumBytes()=5472, getBytesOnDisk()=5472, getVisibleLength()=5472, getVolume()=/tmp/minicluster_storage11240959748026123074/data/data2, getBlockURI()=file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741840) for deletion
20:33:21.399 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741840_1016 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741840
20:33:21.409 INFO  FileInputFormat - Total input files to process : 1
20:33:21.411 INFO  MemoryStore - Block broadcast_79 stored as values in memory (estimated size 160.7 KiB, free 1917.7 MiB)
20:33:21.412 INFO  MemoryStore - Block broadcast_79_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:21.412 INFO  BlockManagerInfo - Added broadcast_79_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:21.412 INFO  SparkContext - Created broadcast 79 from broadcast at ReadsSparkSink.java:133
20:33:21.413 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:21.413 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:21.414 INFO  MemoryStore - Block broadcast_80 stored as values in memory (estimated size 163.2 KiB, free 1917.5 MiB)
20:33:21.415 INFO  MemoryStore - Block broadcast_80_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:21.416 INFO  BlockManagerInfo - Added broadcast_80_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:21.416 INFO  SparkContext - Created broadcast 80 from broadcast at BamSink.java:76
20:33:21.418 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.419 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:21.419 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:21.419 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:21.420 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:21.426 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:21.427 INFO  DAGScheduler - Registering RDD 178 (mapToPair at SparkUtils.java:161) as input to shuffle 11
20:33:21.427 INFO  DAGScheduler - Got job 36 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:21.427 INFO  DAGScheduler - Final stage: ResultStage 50 (runJob at SparkHadoopWriter.scala:83)
20:33:21.427 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 49)
20:33:21.427 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 49)
20:33:21.428 INFO  DAGScheduler - Submitting ShuffleMapStage 49 (MapPartitionsRDD[178] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:21.452 INFO  MemoryStore - Block broadcast_81 stored as values in memory (estimated size 520.4 KiB, free 1917.0 MiB)
20:33:21.454 INFO  MemoryStore - Block broadcast_81_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.9 MiB)
20:33:21.454 INFO  BlockManagerInfo - Added broadcast_81_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:21.454 INFO  SparkContext - Created broadcast 81 from broadcast at DAGScheduler.scala:1580
20:33:21.454 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 49 (MapPartitionsRDD[178] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:21.454 INFO  TaskSchedulerImpl - Adding task set 49.0 with 1 tasks resource profile 0
20:33:21.455 INFO  TaskSetManager - Starting task 0.0 in stage 49.0 (TID 87) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:21.456 INFO  Executor - Running task 0.0 in stage 49.0 (TID 87)
20:33:21.490 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:21.509 INFO  Executor - Finished task 0.0 in stage 49.0 (TID 87). 1148 bytes result sent to driver
20:33:21.510 INFO  TaskSetManager - Finished task 0.0 in stage 49.0 (TID 87) in 55 ms on localhost (executor driver) (1/1)
20:33:21.510 INFO  TaskSchedulerImpl - Removed TaskSet 49.0, whose tasks have all completed, from pool 
20:33:21.510 INFO  DAGScheduler - ShuffleMapStage 49 (mapToPair at SparkUtils.java:161) finished in 0.082 s
20:33:21.510 INFO  DAGScheduler - looking for newly runnable stages
20:33:21.510 INFO  DAGScheduler - running: HashSet()
20:33:21.510 INFO  DAGScheduler - waiting: HashSet(ResultStage 50)
20:33:21.510 INFO  DAGScheduler - failed: HashSet()
20:33:21.510 INFO  DAGScheduler - Submitting ResultStage 50 (MapPartitionsRDD[183] at mapToPair at BamSink.java:91), which has no missing parents
20:33:21.518 INFO  MemoryStore - Block broadcast_82 stored as values in memory (estimated size 241.5 KiB, free 1916.6 MiB)
20:33:21.518 INFO  MemoryStore - Block broadcast_82_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.6 MiB)
20:33:21.519 INFO  BlockManagerInfo - Added broadcast_82_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.4 MiB)
20:33:21.519 INFO  SparkContext - Created broadcast 82 from broadcast at DAGScheduler.scala:1580
20:33:21.519 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 50 (MapPartitionsRDD[183] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:21.519 INFO  TaskSchedulerImpl - Adding task set 50.0 with 1 tasks resource profile 0
20:33:21.520 INFO  TaskSetManager - Starting task 0.0 in stage 50.0 (TID 88) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:21.520 INFO  Executor - Running task 0.0 in stage 50.0 (TID 88)
20:33:21.525 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:21.526 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:21.541 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:21.541 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:21.541 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:21.541 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:21.541 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:21.541 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:21.543 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.547 INFO  StateChange - BLOCK* allocate blk_1073741849_1025, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0/part-r-00000
20:33:21.548 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741849_1025 src: /127.0.0.1:44510 dest: /127.0.0.1:35765
20:33:21.551 INFO  clienttrace - src: /127.0.0.1:44510, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741849_1025, duration(ns): 2101177
20:33:21.552 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741849_1025, type=LAST_IN_PIPELINE terminating
20:33:21.553 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.554 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:21.554 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0	dst=null	perm=null	proto=rpc
20:33:21.555 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0	dst=null	perm=null	proto=rpc
20:33:21.556 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/task_20250715203321574079127111289625_0183_r_000000	dst=null	perm=null	proto=rpc
20:33:21.556 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/_temporary/attempt_20250715203321574079127111289625_0183_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/task_20250715203321574079127111289625_0183_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:21.557 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203321574079127111289625_0183_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/task_20250715203321574079127111289625_0183_r_000000
20:33:21.557 INFO  SparkHadoopMapRedUtil - attempt_20250715203321574079127111289625_0183_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:21.557 INFO  Executor - Finished task 0.0 in stage 50.0 (TID 88). 1858 bytes result sent to driver
20:33:21.559 INFO  TaskSetManager - Finished task 0.0 in stage 50.0 (TID 88) in 39 ms on localhost (executor driver) (1/1)
20:33:21.559 INFO  TaskSchedulerImpl - Removed TaskSet 50.0, whose tasks have all completed, from pool 
20:33:21.559 INFO  DAGScheduler - ResultStage 50 (runJob at SparkHadoopWriter.scala:83) finished in 0.048 s
20:33:21.559 INFO  DAGScheduler - Job 36 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:21.559 INFO  TaskSchedulerImpl - Killing all running tasks in stage 50: Stage finished
20:33:21.559 INFO  DAGScheduler - Job 36 finished: runJob at SparkHadoopWriter.scala:83, took 0.133082 s
20:33:21.560 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203321574079127111289625_0183.
20:33:21.561 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:21.562 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.562 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/task_20250715203321574079127111289625_0183_r_000000	dst=null	perm=null	proto=rpc
20:33:21.563 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:21.564 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary/0/task_20250715203321574079127111289625_0183_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.565 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:21.566 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.567 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.568 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/.spark-staging-183	dst=null	perm=null	proto=rpc
20:33:21.568 INFO  SparkHadoopWriter - Write Job job_20250715203321574079127111289625_0183 committed. Elapsed time: 7 ms.
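The audit trail above is the stock FileOutputCommitter algorithm-version-1 protocol: the task writes part-r-00000 under _temporary/0/_temporary/attempt_*, task commit renames that attempt directory to _temporary/0/task_*, and job commit moves the part file into the .parts directory, deletes _temporary, and drops a _SUCCESS marker. A minimal, hypothetical sketch of pinning that committer behaviour on a Hadoop Configuration (the property names are standard Hadoop; the job wiring is illustrative only and is not the test's own code):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class CommitterConfigSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Algorithm version 1: two-phase rename (attempt dir -> task dir -> output dir), as seen in the log.
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
            // Matches "skip cleanup _temporary ... :false, ignore cleanup failures: false" logged above.
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup.skipped", false);
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup-failures.ignored", false);

            Job job = Job.getInstance(conf, "bam-parts-write-sketch");
            // Hypothetical output path; the test writes to a UUID-suffixed .parts directory on the MiniDFSCluster.
            FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:44977/user/runner/example.bam.parts"));
        }
    }

Algorithm version 2 would merge the task output directly into the destination during task commit; the extra rename of the task directory visible above is what version 1 adds in exchange for an atomic job-level commit.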
20:33:21.568 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.571 INFO  StateChange - BLOCK* allocate blk_1073741850_1026, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/header
20:33:21.572 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741850_1026 src: /127.0.0.1:44514 dest: /127.0.0.1:35765
20:33:21.573 INFO  clienttrace - src: /127.0.0.1:44514, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741850_1026, duration(ns): 484826
20:33:21.573 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741850_1026, type=LAST_IN_PIPELINE terminating
20:33:21.574 INFO  FSNamesystem - BLOCK* blk_1073741850_1026 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/header
20:33:21.975 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.976 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.977 INFO  StateChange - BLOCK* allocate blk_1073741851_1027, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/terminator
20:33:21.979 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741851_1027 src: /127.0.0.1:44524 dest: /127.0.0.1:35765
20:33:21.980 INFO  clienttrace - src: /127.0.0.1:44524, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741851_1027, duration(ns): 506333
20:33:21.980 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741851_1027, type=LAST_IN_PIPELINE terminating
20:33:21.981 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.982 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts	dst=null	perm=null	proto=rpc
20:33:21.984 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.985 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:21.985 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam
20:33:21.986 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.986 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.987 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:21.987 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam done
20:33:21.988 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.989 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.parts	dst=null	perm=null	proto=rpc
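The sequence just above shows how the final BAM is assembled on HDFS: an empty output file is created inside the .parts directory, the header, part-r-00000, and terminator pieces are merged into it with an HDFS concat, the result is renamed to the destination .bam, and the .parts directory is deleted. A minimal sketch of that merge pattern using the plain Hadoop FileSystem API (the paths and class name are hypothetical; the test itself drives this through the HadoopFileSystemWrapper logged above):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BamPartsMergeSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new URI("hdfs://localhost:44977"), new Configuration());

            Path parts = new Path("/user/runner/example.bam.parts");   // hypothetical .parts directory
            Path merged = new Path(parts, "output");
            fs.create(merged).close();                                  // empty concat target, as in the audit log

            // Merge header + body + 28-byte BGZF terminator into the target.
            fs.concat(merged, new Path[] {
                    new Path(parts, "header"),
                    new Path(parts, "part-r-00000"),
                    new Path(parts, "terminator")
            });

            fs.rename(merged, new Path("/user/runner/example.bam"));   // move into place
            fs.delete(parts, true);                                     // drop the .parts directory
        }
    }

Because concat only relinks blocks on the NameNode rather than rewriting data, the merged BAM appears almost instantly, which is why the whole assemble-and-rename step spans only a couple of milliseconds in the timestamps above.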
20:33:21.989 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.990 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.990 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.991 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:21.992 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.bai	dst=null	perm=null	proto=rpc
20:33:21.992 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bai	dst=null	perm=null	proto=rpc
20:33:21.994 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:21.996 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:21.996 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.sbi	dst=null	perm=null	proto=rpc
20:33:21.999 INFO  MemoryStore - Block broadcast_83 stored as values in memory (estimated size 297.9 KiB, free 1916.3 MiB)
20:33:22.010 INFO  MemoryStore - Block broadcast_83_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.2 MiB)
20:33:22.010 INFO  BlockManagerInfo - Added broadcast_83_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:22.010 INFO  SparkContext - Created broadcast 83 from newAPIHadoopFile at PathSplitSource.java:96
20:33:22.033 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.033 INFO  FileInputFormat - Total input files to process : 1
20:33:22.033 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.072 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:22.072 INFO  DAGScheduler - Got job 37 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:22.072 INFO  DAGScheduler - Final stage: ResultStage 51 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:22.072 INFO  DAGScheduler - Parents of final stage: List()
20:33:22.072 INFO  DAGScheduler - Missing parents: List()
20:33:22.073 INFO  DAGScheduler - Submitting ResultStage 51 (MapPartitionsRDD[190] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:22.090 INFO  MemoryStore - Block broadcast_84 stored as values in memory (estimated size 426.2 KiB, free 1915.8 MiB)
20:33:22.092 INFO  MemoryStore - Block broadcast_84_piece0 stored as bytes in memory (estimated size 153.7 KiB, free 1915.7 MiB)
20:33:22.092 INFO  BlockManagerInfo - Added broadcast_84_piece0 in memory on localhost:45281 (size: 153.7 KiB, free: 1919.2 MiB)
20:33:22.092 INFO  SparkContext - Created broadcast 84 from broadcast at DAGScheduler.scala:1580
20:33:22.093 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 51 (MapPartitionsRDD[190] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:22.093 INFO  TaskSchedulerImpl - Adding task set 51.0 with 1 tasks resource profile 0
20:33:22.093 INFO  TaskSetManager - Starting task 0.0 in stage 51.0 (TID 89) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:22.094 INFO  Executor - Running task 0.0 in stage 51.0 (TID 89)
20:33:22.131 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam:0+237038
20:33:22.132 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.133 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.135 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.135 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.136 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.137 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.bai	dst=null	perm=null	proto=rpc
20:33:22.137 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bai	dst=null	perm=null	proto=rpc
20:33:22.139 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.141 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.142 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.143 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.144 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.149 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.151 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.153 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.155 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.157 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.159 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.160 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.161 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.162 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.163 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.164 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.165 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.166 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.167 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.168 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.169 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.170 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.171 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.172 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.174 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.176 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.177 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.180 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.181 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.182 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.183 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.185 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.187 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.187 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.188 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.189 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.192 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.192 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.193 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.194 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.195 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.196 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.197 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.198 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.199 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.200 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.201 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.201 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.202 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.203 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.203 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.204 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.205 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.206 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.207 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.208 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.208 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.209 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.210 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.211 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.211 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.212 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.213 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.213 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.214 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.215 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.216 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.bai	dst=null	perm=null	proto=rpc
20:33:22.217 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bai	dst=null	perm=null	proto=rpc
20:33:22.218 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.221 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.222 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:22.226 INFO  Executor - Finished task 0.0 in stage 51.0 (TID 89). 651526 bytes result sent to driver
20:33:22.228 INFO  TaskSetManager - Finished task 0.0 in stage 51.0 (TID 89) in 135 ms on localhost (executor driver) (1/1)
20:33:22.228 INFO  TaskSchedulerImpl - Removed TaskSet 51.0, whose tasks have all completed, from pool 
20:33:22.229 INFO  DAGScheduler - ResultStage 51 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.156 s
20:33:22.229 INFO  DAGScheduler - Job 37 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:22.229 INFO  TaskSchedulerImpl - Killing all running tasks in stage 51: Stage finished
20:33:22.229 INFO  DAGScheduler - Job 37 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.157120 s
20:33:22.245 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:22.245 INFO  DAGScheduler - Got job 38 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:22.245 INFO  DAGScheduler - Final stage: ResultStage 52 (count at ReadsSparkSinkUnitTest.java:185)
20:33:22.245 INFO  DAGScheduler - Parents of final stage: List()
20:33:22.245 INFO  DAGScheduler - Missing parents: List()
20:33:22.246 INFO  DAGScheduler - Submitting ResultStage 52 (MapPartitionsRDD[171] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:22.266 INFO  MemoryStore - Block broadcast_85 stored as values in memory (estimated size 426.1 KiB, free 1915.2 MiB)
20:33:22.268 INFO  MemoryStore - Block broadcast_85_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.1 MiB)
20:33:22.268 INFO  BlockManagerInfo - Added broadcast_85_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.0 MiB)
20:33:22.269 INFO  SparkContext - Created broadcast 85 from broadcast at DAGScheduler.scala:1580
20:33:22.269 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 52 (MapPartitionsRDD[171] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:22.269 INFO  TaskSchedulerImpl - Adding task set 52.0 with 1 tasks resource profile 0
20:33:22.270 INFO  TaskSetManager - Starting task 0.0 in stage 52.0 (TID 90) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:22.270 INFO  Executor - Running task 0.0 in stage 52.0 (TID 90)
20:33:22.319 INFO  BlockManagerInfo - Removed broadcast_72_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.0 MiB)
20:33:22.320 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:22.322 INFO  BlockManagerInfo - Removed broadcast_80_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.0 MiB)
20:33:22.322 INFO  BlockManagerInfo - Removed broadcast_82_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.1 MiB)
20:33:22.323 INFO  BlockManagerInfo - Removed broadcast_79_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.1 MiB)
20:33:22.324 INFO  BlockManagerInfo - Removed broadcast_73_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.1 MiB)
20:33:22.325 INFO  BlockManagerInfo - Removed broadcast_75_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.3 MiB)
20:33:22.326 INFO  BlockManagerInfo - Removed broadcast_81_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:22.326 INFO  BlockManagerInfo - Removed broadcast_78_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:22.328 INFO  BlockManagerInfo - Removed broadcast_66_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:22.329 INFO  BlockManagerInfo - Removed broadcast_76_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.6 MiB)
20:33:22.330 INFO  BlockManagerInfo - Removed broadcast_84_piece0 on localhost:45281 in memory (size: 153.7 KiB, free: 1919.8 MiB)
20:33:22.333 INFO  Executor - Finished task 0.0 in stage 52.0 (TID 90). 1032 bytes result sent to driver
20:33:22.333 INFO  TaskSetManager - Finished task 0.0 in stage 52.0 (TID 90) in 64 ms on localhost (executor driver) (1/1)
20:33:22.333 INFO  TaskSchedulerImpl - Removed TaskSet 52.0, whose tasks have all completed, from pool 
20:33:22.334 INFO  DAGScheduler - ResultStage 52 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.088 s
20:33:22.334 INFO  DAGScheduler - Job 38 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:22.334 INFO  TaskSchedulerImpl - Killing all running tasks in stage 52: Stage finished
20:33:22.334 INFO  DAGScheduler - Job 38 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.089045 s
20:33:22.337 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:22.338 INFO  DAGScheduler - Got job 39 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:22.338 INFO  DAGScheduler - Final stage: ResultStage 53 (count at ReadsSparkSinkUnitTest.java:185)
20:33:22.338 INFO  DAGScheduler - Parents of final stage: List()
20:33:22.338 INFO  DAGScheduler - Missing parents: List()
20:33:22.338 INFO  DAGScheduler - Submitting ResultStage 53 (MapPartitionsRDD[190] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:22.359 INFO  MemoryStore - Block broadcast_86 stored as values in memory (estimated size 426.1 KiB, free 1918.3 MiB)
20:33:22.360 INFO  MemoryStore - Block broadcast_86_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.2 MiB)
20:33:22.360 INFO  BlockManagerInfo - Added broadcast_86_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:22.361 INFO  SparkContext - Created broadcast 86 from broadcast at DAGScheduler.scala:1580
20:33:22.361 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 53 (MapPartitionsRDD[190] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:22.361 INFO  TaskSchedulerImpl - Adding task set 53.0 with 1 tasks resource profile 0
20:33:22.362 INFO  TaskSetManager - Starting task 0.0 in stage 53.0 (TID 91) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:22.362 INFO  Executor - Running task 0.0 in stage 53.0 (TID 91)
20:33:22.398 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam:0+237038
20:33:22.399 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.399 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.401 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.401 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.402 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.403 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.bai	dst=null	perm=null	proto=rpc
20:33:22.404 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bai	dst=null	perm=null	proto=rpc
20:33:22.405 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.407 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.407 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.408 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.409 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.413 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.414 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.416 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.417 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.417 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.418 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.419 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.419 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.420 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.421 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.422 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.422 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.423 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.424 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.425 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.426 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.427 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.428 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.431 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.432 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.433 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.434 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.434 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.435 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.436 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.437 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.437 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.438 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.439 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.440 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.441 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.441 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.442 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.443 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.444 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.444 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.445 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.446 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.447 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.447 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.449 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.451 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.452 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.453 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.454 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.455 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.457 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.457 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.458 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.459 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.460 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.462 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.464 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.464 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.465 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.466 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.466 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.468 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.468 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:22.469 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.469 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.470 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.470 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam	dst=null	perm=null	proto=rpc
20:33:22.471 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bam.bai	dst=null	perm=null	proto=rpc
20:33:22.472 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_33ed502f-9d9a-4aba-90c7-6ebeff5d844b.bai	dst=null	perm=null	proto=rpc
20:33:22.473 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:22.476 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:22.477 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:22.479 INFO  Executor - Finished task 0.0 in stage 53.0 (TID 91). 989 bytes result sent to driver
20:33:22.480 INFO  TaskSetManager - Finished task 0.0 in stage 53.0 (TID 91) in 117 ms on localhost (executor driver) (1/1)
20:33:22.480 INFO  TaskSchedulerImpl - Removed TaskSet 53.0, whose tasks have all completed, from pool 
20:33:22.480 INFO  DAGScheduler - ResultStage 53 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.141 s
20:33:22.480 INFO  DAGScheduler - Job 39 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:22.480 INFO  TaskSchedulerImpl - Killing all running tasks in stage 53: Stage finished
20:33:22.480 INFO  DAGScheduler - Job 39 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.142615 s
20:33:22.485 INFO  MemoryStore - Block broadcast_87 stored as values in memory (estimated size 298.0 KiB, free 1917.9 MiB)
20:33:22.491 INFO  MemoryStore - Block broadcast_87_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1917.8 MiB)
20:33:22.491 INFO  BlockManagerInfo - Added broadcast_87_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.6 MiB)
20:33:22.492 INFO  SparkContext - Created broadcast 87 from newAPIHadoopFile at PathSplitSource.java:96
20:33:22.519 INFO  MemoryStore - Block broadcast_88 stored as values in memory (estimated size 298.0 KiB, free 1917.6 MiB)
20:33:22.525 INFO  MemoryStore - Block broadcast_88_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1917.5 MiB)
20:33:22.525 INFO  BlockManagerInfo - Added broadcast_88_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.5 MiB)
20:33:22.525 INFO  SparkContext - Created broadcast 88 from newAPIHadoopFile at PathSplitSource.java:96
20:33:22.546 INFO  FileInputFormat - Total input files to process : 1
20:33:22.549 INFO  MemoryStore - Block broadcast_89 stored as values in memory (estimated size 160.7 KiB, free 1917.4 MiB)
20:33:22.550 INFO  MemoryStore - Block broadcast_89_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:22.550 INFO  BlockManagerInfo - Added broadcast_89_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:22.551 INFO  SparkContext - Created broadcast 89 from broadcast at ReadsSparkSink.java:133
20:33:22.552 INFO  MemoryStore - Block broadcast_90 stored as values in memory (estimated size 163.2 KiB, free 1917.2 MiB)
20:33:22.553 INFO  MemoryStore - Block broadcast_90_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.2 MiB)
20:33:22.553 INFO  BlockManagerInfo - Added broadcast_90_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:22.553 INFO  SparkContext - Created broadcast 90 from broadcast at BamSink.java:76
20:33:22.556 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:22.556 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:22.557 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:22.557 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:22.558 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:22.568 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:22.569 INFO  DAGScheduler - Registering RDD 204 (mapToPair at SparkUtils.java:161) as input to shuffle 12
20:33:22.569 INFO  DAGScheduler - Got job 40 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:22.569 INFO  DAGScheduler - Final stage: ResultStage 55 (runJob at SparkHadoopWriter.scala:83)
20:33:22.569 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 54)
20:33:22.569 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 54)
20:33:22.570 INFO  DAGScheduler - Submitting ShuffleMapStage 54 (MapPartitionsRDD[204] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:22.587 INFO  MemoryStore - Block broadcast_91 stored as values in memory (estimated size 520.4 KiB, free 1916.7 MiB)
20:33:22.589 INFO  MemoryStore - Block broadcast_91_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.5 MiB)
20:33:22.589 INFO  BlockManagerInfo - Added broadcast_91_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.3 MiB)
20:33:22.590 INFO  SparkContext - Created broadcast 91 from broadcast at DAGScheduler.scala:1580
20:33:22.590 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 54 (MapPartitionsRDD[204] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:22.590 INFO  TaskSchedulerImpl - Adding task set 54.0 with 1 tasks resource profile 0
20:33:22.590 INFO  TaskSetManager - Starting task 0.0 in stage 54.0 (TID 92) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
20:33:22.591 INFO  Executor - Running task 0.0 in stage 54.0 (TID 92)
20:33:22.627 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:22.646 INFO  Executor - Finished task 0.0 in stage 54.0 (TID 92). 1148 bytes result sent to driver
20:33:22.647 INFO  TaskSetManager - Finished task 0.0 in stage 54.0 (TID 92) in 57 ms on localhost (executor driver) (1/1)
20:33:22.647 INFO  TaskSchedulerImpl - Removed TaskSet 54.0, whose tasks have all completed, from pool 
20:33:22.647 INFO  DAGScheduler - ShuffleMapStage 54 (mapToPair at SparkUtils.java:161) finished in 0.077 s
20:33:22.647 INFO  DAGScheduler - looking for newly runnable stages
20:33:22.647 INFO  DAGScheduler - running: HashSet()
20:33:22.647 INFO  DAGScheduler - waiting: HashSet(ResultStage 55)
20:33:22.647 INFO  DAGScheduler - failed: HashSet()
20:33:22.648 INFO  DAGScheduler - Submitting ResultStage 55 (MapPartitionsRDD[209] at mapToPair at BamSink.java:91), which has no missing parents
20:33:22.655 INFO  MemoryStore - Block broadcast_92 stored as values in memory (estimated size 241.5 KiB, free 1916.3 MiB)
20:33:22.656 INFO  MemoryStore - Block broadcast_92_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.2 MiB)
20:33:22.656 INFO  BlockManagerInfo - Added broadcast_92_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.3 MiB)
20:33:22.656 INFO  SparkContext - Created broadcast 92 from broadcast at DAGScheduler.scala:1580
20:33:22.657 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 55 (MapPartitionsRDD[209] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:22.657 INFO  TaskSchedulerImpl - Adding task set 55.0 with 1 tasks resource profile 0
20:33:22.657 INFO  TaskSetManager - Starting task 0.0 in stage 55.0 (TID 93) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:22.658 INFO  Executor - Running task 0.0 in stage 55.0 (TID 93)
20:33:22.663 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:22.663 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:22.678 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:22.678 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:22.678 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:22.678 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:22.678 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:22.678 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:22.680 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:22.681 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:22.682 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:22.687 INFO  StateChange - BLOCK* allocate blk_1073741852_1028, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/part-r-00000
20:33:22.688 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741852_1028 src: /127.0.0.1:45224 dest: /127.0.0.1:35765
20:33:22.692 INFO  clienttrace - src: /127.0.0.1:45224, dest: /127.0.0.1:35765, bytes: 229774, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741852_1028, duration(ns): 2749923
20:33:22.692 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741852_1028, type=LAST_IN_PIPELINE terminating
20:33:22.693 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:22.694 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:22.695 INFO  StateChange - BLOCK* allocate blk_1073741853_1029, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.sbi
20:33:22.695 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741853_1029 src: /127.0.0.1:45236 dest: /127.0.0.1:35765
20:33:22.696 INFO  clienttrace - src: /127.0.0.1:45236, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741853_1029, duration(ns): 386132
20:33:22.696 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741853_1029, type=LAST_IN_PIPELINE terminating
20:33:22.697 INFO  FSNamesystem - BLOCK* blk_1073741853_1029 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.sbi
20:33:23.098 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.100 INFO  StateChange - BLOCK* allocate blk_1073741854_1030, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.bai
20:33:23.101 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741854_1030 src: /127.0.0.1:45252 dest: /127.0.0.1:35765
20:33:23.103 INFO  clienttrace - src: /127.0.0.1:45252, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741854_1030, duration(ns): 567925
20:33:23.103 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741854_1030, type=LAST_IN_PIPELINE terminating
20:33:23.104 INFO  FSNamesystem - BLOCK* blk_1073741854_1030 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.bai
20:33:23.505 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.506 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0	dst=null	perm=null	proto=rpc
20:33:23.507 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0	dst=null	perm=null	proto=rpc
20:33:23.507 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000	dst=null	perm=null	proto=rpc
20:33:23.508 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/_temporary/attempt_202507152033227646701957989259627_0209_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:23.508 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033227646701957989259627_0209_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000
20:33:23.508 INFO  SparkHadoopMapRedUtil - attempt_202507152033227646701957989259627_0209_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:23.509 INFO  Executor - Finished task 0.0 in stage 55.0 (TID 93). 1858 bytes result sent to driver
20:33:23.509 INFO  TaskSetManager - Finished task 0.0 in stage 55.0 (TID 93) in 852 ms on localhost (executor driver) (1/1)
20:33:23.510 INFO  TaskSchedulerImpl - Removed TaskSet 55.0, whose tasks have all completed, from pool 
20:33:23.510 INFO  DAGScheduler - ResultStage 55 (runJob at SparkHadoopWriter.scala:83) finished in 0.862 s
20:33:23.510 INFO  DAGScheduler - Job 40 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:23.510 INFO  TaskSchedulerImpl - Killing all running tasks in stage 55: Stage finished
20:33:23.510 INFO  DAGScheduler - Job 40 finished: runJob at SparkHadoopWriter.scala:83, took 0.941908 s
20:33:23.511 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033227646701957989259627_0209.
20:33:23.511 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:23.512 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:23.512 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000	dst=null	perm=null	proto=rpc
20:33:23.513 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:23.514 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.514 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:23.515 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.515 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:23.516 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary/0/task_202507152033227646701957989259627_0209_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.516 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:23.517 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.518 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.519 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.spark-staging-209	dst=null	perm=null	proto=rpc
20:33:23.519 INFO  SparkHadoopWriter - Write Job job_202507152033227646701957989259627_0209 committed. Elapsed time: 7 ms.
20:33:23.519 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.521 INFO  StateChange - BLOCK* allocate blk_1073741855_1031, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/header
20:33:23.522 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741855_1031 src: /127.0.0.1:45256 dest: /127.0.0.1:35765
20:33:23.523 INFO  clienttrace - src: /127.0.0.1:45256, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741855_1031, duration(ns): 482751
20:33:23.523 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741855_1031, type=LAST_IN_PIPELINE terminating
20:33:23.524 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.525 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.526 INFO  StateChange - BLOCK* allocate blk_1073741856_1032, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/terminator
20:33:23.527 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741856_1032 src: /127.0.0.1:45266 dest: /127.0.0.1:35765
20:33:23.528 INFO  clienttrace - src: /127.0.0.1:45266, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741856_1032, duration(ns): 382672
20:33:23.528 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741856_1032, type=LAST_IN_PIPELINE terminating
20:33:23.528 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.529 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:23.531 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.532 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.532 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam
20:33:23.532 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.533 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:23.534 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.534 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam done
20:33:23.535 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:23.535 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi
20:33:23.535 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:23.536 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.537 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:23.538 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:23.539 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:23.540 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:23.541 INFO  StateChange - BLOCK* allocate blk_1073741857_1033, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi
20:33:23.542 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741857_1033 src: /127.0.0.1:45270 dest: /127.0.0.1:35765
20:33:23.544 INFO  clienttrace - src: /127.0.0.1:45270, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741857_1033, duration(ns): 1241277
20:33:23.544 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741857_1033, type=LAST_IN_PIPELINE terminating
20:33:23.546 INFO  FSNamesystem - BLOCK* blk_1073741857_1033 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi
20:33:23.948 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:23.948 INFO  IndexFileMerger - Done merging .sbi files
20:33:23.948 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai
20:33:23.949 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:23.950 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:23.951 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:23.951 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:23.953 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:23.953 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:23.958 INFO  StateChange - BLOCK* allocate blk_1073741858_1034, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai
20:33:23.959 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741858_1034 src: /127.0.0.1:45276 dest: /127.0.0.1:35765
20:33:23.960 INFO  clienttrace - src: /127.0.0.1:45276, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741858_1034, duration(ns): 469467
20:33:23.960 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741858_1034, type=LAST_IN_PIPELINE terminating
20:33:23.961 INFO  FSNamesystem - BLOCK* blk_1073741858_1034 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai
20:33:24.362 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:24.362 INFO  IndexFileMerger - Done merging .bai files
20:33:24.363 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.parts	dst=null	perm=null	proto=rpc
20:33:24.373 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.381 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.382 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.383 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.384 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:24.385 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.385 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.386 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.387 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.388 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.388 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.389 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.391 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:24.392 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:24.393 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:24.393 WARN  DFSUtil - Unexpected value for data transfer bytes=231570 duration=0
20:33:24.394 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.394 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.395 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.sbi	dst=null	perm=null	proto=rpc
20:33:24.396 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:24.396 INFO  MemoryStore - Block broadcast_93 stored as values in memory (estimated size 320.0 B, free 1916.2 MiB)
20:33:24.397 INFO  MemoryStore - Block broadcast_93_piece0 stored as bytes in memory (estimated size 233.0 B, free 1916.2 MiB)
20:33:24.397 INFO  BlockManagerInfo - Added broadcast_93_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.3 MiB)
20:33:24.397 INFO  SparkContext - Created broadcast 93 from broadcast at BamSource.java:104
20:33:24.399 INFO  MemoryStore - Block broadcast_94 stored as values in memory (estimated size 297.9 KiB, free 1915.9 MiB)
20:33:24.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741845_1021 replica FinalizedReplica, blk_1073741845_1021, FINALIZED
  getNumBytes()     = 212
  getBytesOnDisk()  = 212
  getVisibleLength()= 212
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741845 for deletion
20:33:24.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741853_1029 replica FinalizedReplica, blk_1073741853_1029, FINALIZED
  getNumBytes()     = 212
  getBytesOnDisk()  = 212
  getVisibleLength()= 212
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741853 for deletion
20:33:24.400 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741845_1021 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741845
20:33:24.400 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741853_1029 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741853
20:33:24.405 INFO  MemoryStore - Block broadcast_94_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.9 MiB)
20:33:24.406 INFO  BlockManagerInfo - Added broadcast_94_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:24.406 INFO  SparkContext - Created broadcast 94 from newAPIHadoopFile at PathSplitSource.java:96
20:33:24.415 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.416 INFO  FileInputFormat - Total input files to process : 1
20:33:24.416 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.431 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:24.431 INFO  DAGScheduler - Got job 41 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:24.431 INFO  DAGScheduler - Final stage: ResultStage 56 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:24.431 INFO  DAGScheduler - Parents of final stage: List()
20:33:24.432 INFO  DAGScheduler - Missing parents: List()
20:33:24.432 INFO  DAGScheduler - Submitting ResultStage 56 (MapPartitionsRDD[215] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:24.438 INFO  MemoryStore - Block broadcast_95 stored as values in memory (estimated size 148.2 KiB, free 1915.7 MiB)
20:33:24.439 INFO  MemoryStore - Block broadcast_95_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1915.7 MiB)
20:33:24.439 INFO  BlockManagerInfo - Added broadcast_95_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.2 MiB)
20:33:24.439 INFO  SparkContext - Created broadcast 95 from broadcast at DAGScheduler.scala:1580
20:33:24.439 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 56 (MapPartitionsRDD[215] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:24.439 INFO  TaskSchedulerImpl - Adding task set 56.0 with 1 tasks resource profile 0
20:33:24.440 INFO  TaskSetManager - Starting task 0.0 in stage 56.0 (TID 94) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:24.440 INFO  Executor - Running task 0.0 in stage 56.0 (TID 94)
20:33:24.452 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam:0+235514
20:33:24.453 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.454 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.455 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.456 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.456 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.460 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:24.461 WARN  DFSUtil - Unexpected value for data transfer bytes=231570 duration=0
20:33:24.462 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:24.464 INFO  Executor - Finished task 0.0 in stage 56.0 (TID 94). 650184 bytes result sent to driver
20:33:24.466 INFO  TaskSetManager - Finished task 0.0 in stage 56.0 (TID 94) in 26 ms on localhost (executor driver) (1/1)
20:33:24.466 INFO  TaskSchedulerImpl - Removed TaskSet 56.0, whose tasks have all completed, from pool 
20:33:24.467 INFO  DAGScheduler - ResultStage 56 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.034 s
20:33:24.467 INFO  DAGScheduler - Job 41 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:24.467 INFO  TaskSchedulerImpl - Killing all running tasks in stage 56: Stage finished
20:33:24.467 INFO  DAGScheduler - Job 41 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.036026 s
20:33:24.477 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:24.477 INFO  DAGScheduler - Got job 42 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:24.477 INFO  DAGScheduler - Final stage: ResultStage 57 (count at ReadsSparkSinkUnitTest.java:185)
20:33:24.477 INFO  DAGScheduler - Parents of final stage: List()
20:33:24.477 INFO  DAGScheduler - Missing parents: List()
20:33:24.477 INFO  DAGScheduler - Submitting ResultStage 57 (MapPartitionsRDD[197] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:24.498 INFO  MemoryStore - Block broadcast_96 stored as values in memory (estimated size 426.1 KiB, free 1915.2 MiB)
20:33:24.500 INFO  MemoryStore - Block broadcast_96_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.1 MiB)
20:33:24.500 INFO  BlockManagerInfo - Added broadcast_96_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.0 MiB)
20:33:24.500 INFO  SparkContext - Created broadcast 96 from broadcast at DAGScheduler.scala:1580
20:33:24.500 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 57 (MapPartitionsRDD[197] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:24.500 INFO  TaskSchedulerImpl - Adding task set 57.0 with 1 tasks resource profile 0
20:33:24.501 INFO  TaskSetManager - Starting task 0.0 in stage 57.0 (TID 95) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7912 bytes) 
20:33:24.501 INFO  Executor - Running task 0.0 in stage 57.0 (TID 95)
20:33:24.540 INFO  BlockManagerInfo - Removed broadcast_83_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.1 MiB)
20:33:24.542 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:24.542 INFO  BlockManagerInfo - Removed broadcast_86_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.2 MiB)
20:33:24.543 INFO  BlockManagerInfo - Removed broadcast_91_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.4 MiB)
20:33:24.544 INFO  BlockManagerInfo - Removed broadcast_95_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.4 MiB)
20:33:24.545 INFO  BlockManagerInfo - Removed broadcast_88_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.5 MiB)
20:33:24.546 INFO  BlockManagerInfo - Removed broadcast_77_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:24.546 INFO  BlockManagerInfo - Removed broadcast_90_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:24.547 INFO  BlockManagerInfo - Removed broadcast_85_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:24.549 INFO  BlockManagerInfo - Removed broadcast_89_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:24.549 INFO  BlockManagerInfo - Removed broadcast_92_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.8 MiB)
20:33:24.556 INFO  Executor - Finished task 0.0 in stage 57.0 (TID 95). 1032 bytes result sent to driver
20:33:24.556 INFO  TaskSetManager - Finished task 0.0 in stage 57.0 (TID 95) in 55 ms on localhost (executor driver) (1/1)
20:33:24.556 INFO  TaskSchedulerImpl - Removed TaskSet 57.0, whose tasks have all completed, from pool 
20:33:24.557 INFO  DAGScheduler - ResultStage 57 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.079 s
20:33:24.557 INFO  DAGScheduler - Job 42 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:24.557 INFO  TaskSchedulerImpl - Killing all running tasks in stage 57: Stage finished
20:33:24.557 INFO  DAGScheduler - Job 42 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.079999 s
20:33:24.560 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:24.561 INFO  DAGScheduler - Got job 43 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:24.561 INFO  DAGScheduler - Final stage: ResultStage 58 (count at ReadsSparkSinkUnitTest.java:185)
20:33:24.561 INFO  DAGScheduler - Parents of final stage: List()
20:33:24.561 INFO  DAGScheduler - Missing parents: List()
20:33:24.561 INFO  DAGScheduler - Submitting ResultStage 58 (MapPartitionsRDD[215] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:24.568 INFO  MemoryStore - Block broadcast_97 stored as values in memory (estimated size 148.1 KiB, free 1918.6 MiB)
20:33:24.568 INFO  MemoryStore - Block broadcast_97_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.6 MiB)
20:33:24.569 INFO  BlockManagerInfo - Added broadcast_97_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.7 MiB)
20:33:24.569 INFO  SparkContext - Created broadcast 97 from broadcast at DAGScheduler.scala:1580
20:33:24.569 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 58 (MapPartitionsRDD[215] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:24.569 INFO  TaskSchedulerImpl - Adding task set 58.0 with 1 tasks resource profile 0
20:33:24.570 INFO  TaskSetManager - Starting task 0.0 in stage 58.0 (TID 96) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:24.570 INFO  Executor - Running task 0.0 in stage 58.0 (TID 96)
20:33:24.583 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam:0+235514
20:33:24.585 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.585 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam	dst=null	perm=null	proto=rpc
20:33:24.586 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.587 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.587 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5d755d71-969c-4474-be70-f51b934637ce.bam.bai	dst=null	perm=null	proto=rpc
20:33:24.589 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:24.591 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:24.591 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:24.592 WARN  DFSUtil - Unexpected value for data transfer bytes=231570 duration=0
20:33:24.593 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:24.594 INFO  Executor - Finished task 0.0 in stage 58.0 (TID 96). 989 bytes result sent to driver
20:33:24.595 INFO  TaskSetManager - Finished task 0.0 in stage 58.0 (TID 96) in 25 ms on localhost (executor driver) (1/1)
20:33:24.595 INFO  TaskSchedulerImpl - Removed TaskSet 58.0, whose tasks have all completed, from pool 
20:33:24.595 INFO  DAGScheduler - ResultStage 58 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.033 s
20:33:24.595 INFO  DAGScheduler - Job 43 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:24.595 INFO  TaskSchedulerImpl - Killing all running tasks in stage 58: Stage finished
20:33:24.596 INFO  DAGScheduler - Job 43 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.035167 s
20:33:24.599 INFO  MemoryStore - Block broadcast_98 stored as values in memory (estimated size 298.0 KiB, free 1918.3 MiB)
20:33:24.606 INFO  MemoryStore - Block broadcast_98_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:24.606 INFO  BlockManagerInfo - Added broadcast_98_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:24.606 INFO  SparkContext - Created broadcast 98 from newAPIHadoopFile at PathSplitSource.java:96
20:33:24.628 INFO  MemoryStore - Block broadcast_99 stored as values in memory (estimated size 298.0 KiB, free 1917.9 MiB)
20:33:24.635 INFO  MemoryStore - Block broadcast_99_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:24.635 INFO  BlockManagerInfo - Added broadcast_99_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:24.635 INFO  SparkContext - Created broadcast 99 from newAPIHadoopFile at PathSplitSource.java:96
20:33:24.656 INFO  FileInputFormat - Total input files to process : 1
20:33:24.657 INFO  MemoryStore - Block broadcast_100 stored as values in memory (estimated size 19.6 KiB, free 1917.9 MiB)
20:33:24.658 INFO  MemoryStore - Block broadcast_100_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1917.9 MiB)
20:33:24.658 INFO  BlockManagerInfo - Added broadcast_100_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.6 MiB)
20:33:24.658 INFO  SparkContext - Created broadcast 100 from broadcast at ReadsSparkSink.java:133
20:33:24.659 INFO  MemoryStore - Block broadcast_101 stored as values in memory (estimated size 20.0 KiB, free 1917.8 MiB)
20:33:24.659 INFO  MemoryStore - Block broadcast_101_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1917.8 MiB)
20:33:24.659 INFO  BlockManagerInfo - Added broadcast_101_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.6 MiB)
20:33:24.659 INFO  SparkContext - Created broadcast 101 from broadcast at BamSink.java:76
20:33:24.662 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
20:33:24.662 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:24.662 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:24.662 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:24.663 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:24.669 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:24.670 INFO  DAGScheduler - Registering RDD 229 (mapToPair at SparkUtils.java:161) as input to shuffle 13
20:33:24.670 INFO  DAGScheduler - Got job 44 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:24.670 INFO  DAGScheduler - Final stage: ResultStage 60 (runJob at SparkHadoopWriter.scala:83)
20:33:24.670 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 59)
20:33:24.670 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 59)
20:33:24.670 INFO  DAGScheduler - Submitting ShuffleMapStage 59 (MapPartitionsRDD[229] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:24.690 INFO  MemoryStore - Block broadcast_102 stored as values in memory (estimated size 434.3 KiB, free 1917.4 MiB)
20:33:24.691 INFO  MemoryStore - Block broadcast_102_piece0 stored as bytes in memory (estimated size 157.6 KiB, free 1917.3 MiB)
20:33:24.692 INFO  BlockManagerInfo - Added broadcast_102_piece0 in memory on localhost:45281 (size: 157.6 KiB, free: 1919.4 MiB)
20:33:24.692 INFO  SparkContext - Created broadcast 102 from broadcast at DAGScheduler.scala:1580
20:33:24.692 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 59 (MapPartitionsRDD[229] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:24.692 INFO  TaskSchedulerImpl - Adding task set 59.0 with 1 tasks resource profile 0
20:33:24.693 INFO  TaskSetManager - Starting task 0.0 in stage 59.0 (TID 97) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7882 bytes) 
20:33:24.693 INFO  Executor - Running task 0.0 in stage 59.0 (TID 97)
20:33:24.725 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:24.742 INFO  Executor - Finished task 0.0 in stage 59.0 (TID 97). 1148 bytes result sent to driver
20:33:24.743 INFO  TaskSetManager - Finished task 0.0 in stage 59.0 (TID 97) in 50 ms on localhost (executor driver) (1/1)
20:33:24.743 INFO  TaskSchedulerImpl - Removed TaskSet 59.0, whose tasks have all completed, from pool 
20:33:24.743 INFO  DAGScheduler - ShuffleMapStage 59 (mapToPair at SparkUtils.java:161) finished in 0.072 s
20:33:24.743 INFO  DAGScheduler - looking for newly runnable stages
20:33:24.743 INFO  DAGScheduler - running: HashSet()
20:33:24.743 INFO  DAGScheduler - waiting: HashSet(ResultStage 60)
20:33:24.743 INFO  DAGScheduler - failed: HashSet()
20:33:24.743 INFO  DAGScheduler - Submitting ResultStage 60 (MapPartitionsRDD[234] at mapToPair at BamSink.java:91), which has no missing parents
20:33:24.751 INFO  MemoryStore - Block broadcast_103 stored as values in memory (estimated size 155.4 KiB, free 1917.1 MiB)
20:33:24.751 INFO  MemoryStore - Block broadcast_103_piece0 stored as bytes in memory (estimated size 58.5 KiB, free 1917.0 MiB)
20:33:24.752 INFO  BlockManagerInfo - Added broadcast_103_piece0 in memory on localhost:45281 (size: 58.5 KiB, free: 1919.4 MiB)
20:33:24.752 INFO  SparkContext - Created broadcast 103 from broadcast at DAGScheduler.scala:1580
20:33:24.752 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 60 (MapPartitionsRDD[234] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:24.752 INFO  TaskSchedulerImpl - Adding task set 60.0 with 1 tasks resource profile 0
20:33:24.753 INFO  TaskSetManager - Starting task 0.0 in stage 60.0 (TID 98) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:24.753 INFO  Executor - Running task 0.0 in stage 60.0 (TID 98)
20:33:24.757 INFO  ShuffleBlockFetcherIterator - Getting 1 (312.6 KiB) non-empty blocks including 1 (312.6 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:24.758 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:24.770 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:24.770 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:24.770 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:24.771 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:24.771 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:24.771 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:24.772 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:24.773 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:24.774 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:24.777 INFO  StateChange - BLOCK* allocate blk_1073741859_1035, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/part-r-00000
20:33:24.778 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741859_1035 src: /127.0.0.1:45310 dest: /127.0.0.1:35765
20:33:24.782 INFO  clienttrace - src: /127.0.0.1:45310, dest: /127.0.0.1:35765, bytes: 235299, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741859_1035, duration(ns): 2672889
20:33:24.782 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741859_1035, type=LAST_IN_PIPELINE terminating
20:33:24.783 INFO  FSNamesystem - BLOCK* blk_1073741859_1035 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/part-r-00000
20:33:25.184 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:25.185 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:25.186 INFO  StateChange - BLOCK* allocate blk_1073741860_1036, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.sbi
20:33:25.187 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741860_1036 src: /127.0.0.1:45318 dest: /127.0.0.1:35765
20:33:25.188 INFO  clienttrace - src: /127.0.0.1:45318, dest: /127.0.0.1:35765, bytes: 204, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741860_1036, duration(ns): 472431
20:33:25.188 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741860_1036, type=LAST_IN_PIPELINE terminating
20:33:25.189 INFO  FSNamesystem - BLOCK* blk_1073741860_1036 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.sbi
20:33:25.590 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:25.591 INFO  StateChange - BLOCK* allocate blk_1073741861_1037, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.bai
20:33:25.592 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741861_1037 src: /127.0.0.1:45324 dest: /127.0.0.1:35765
20:33:25.593 INFO  clienttrace - src: /127.0.0.1:45324, dest: /127.0.0.1:35765, bytes: 592, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741861_1037, duration(ns): 427118
20:33:25.593 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741861_1037, type=LAST_IN_PIPELINE terminating
20:33:25.593 INFO  FSNamesystem - BLOCK* blk_1073741861_1037 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.bai
20:33:25.994 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:25.996 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0	dst=null	perm=null	proto=rpc
20:33:25.996 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0	dst=null	perm=null	proto=rpc
20:33:25.997 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000	dst=null	perm=null	proto=rpc
20:33:25.998 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/_temporary/attempt_20250715203324546826086582639555_0234_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:25.998 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203324546826086582639555_0234_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000
20:33:25.998 INFO  SparkHadoopMapRedUtil - attempt_20250715203324546826086582639555_0234_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:25.999 INFO  Executor - Finished task 0.0 in stage 60.0 (TID 98). 1858 bytes result sent to driver
20:33:25.999 INFO  TaskSetManager - Finished task 0.0 in stage 60.0 (TID 98) in 1246 ms on localhost (executor driver) (1/1)
20:33:25.999 INFO  TaskSchedulerImpl - Removed TaskSet 60.0, whose tasks have all completed, from pool 
20:33:26.000 INFO  DAGScheduler - ResultStage 60 (runJob at SparkHadoopWriter.scala:83) finished in 1.255 s
20:33:26.000 INFO  DAGScheduler - Job 44 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:26.000 INFO  TaskSchedulerImpl - Killing all running tasks in stage 60: Stage finished
20:33:26.000 INFO  DAGScheduler - Job 44 finished: runJob at SparkHadoopWriter.scala:83, took 1.330751 s
20:33:26.000 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203324546826086582639555_0234.
20:33:26.001 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:26.002 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
20:33:26.002 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000	dst=null	perm=null	proto=rpc
20:33:26.003 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:26.003 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.004 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:26.005 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.005 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:26.006 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary/0/task_20250715203324546826086582639555_0234_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.006 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:26.007 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.008 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.009 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.spark-staging-234	dst=null	perm=null	proto=rpc
20:33:26.009 INFO  SparkHadoopWriter - Write Job job_20250715203324546826086582639555_0234 committed. Elapsed time: 8 ms.
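The write path above follows the stock Hadoop FileOutputCommitter v1 protocol: the task writes under .parts/_temporary/0/_temporary/attempt_*, task commit renames that directory to a task directory, and job commit promotes the task's files into the .parts directory, deletes _temporary, and writes a _SUCCESS marker. A minimal sketch of driving that protocol from Spark, assuming a saveAsNewAPIHadoopFile-style call with placeholder key/value/OutputFormat types (not the sink's actual classes):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.spark.api.java.JavaPairRDD;

    public class CommitterWriteSketch {
        // Illustrative only: any saveAsNewAPIHadoopFile call runs through the configured
        // OutputCommitter, which is what produces the _temporary/attempt_* directories,
        // the task/job commit renames and the _SUCCESS marker recorded in the audit log.
        public static void save(JavaPairRDD<NullWritable, Text> records, String partsDir, Configuration conf) {
            records.saveAsNewAPIHadoopFile(
                    partsDir,                // e.g. the ...bam.parts directory above
                    NullWritable.class,      // key class
                    Text.class,              // value class (placeholder; the real sink writes reads)
                    TextOutputFormat.class,  // placeholder OutputFormat for illustration
                    conf);
        }
    }
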
20:33:26.010 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.011 INFO  StateChange - BLOCK* allocate blk_1073741862_1038, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/header
20:33:26.012 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741862_1038 src: /127.0.0.1:45326 dest: /127.0.0.1:35765
20:33:26.014 INFO  clienttrace - src: /127.0.0.1:45326, dest: /127.0.0.1:35765, bytes: 1190, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741862_1038, duration(ns): 553242
20:33:26.014 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741862_1038, type=LAST_IN_PIPELINE terminating
20:33:26.015 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.015 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.016 INFO  StateChange - BLOCK* allocate blk_1073741863_1039, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/terminator
20:33:26.017 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741863_1039 src: /127.0.0.1:45338 dest: /127.0.0.1:35765
20:33:26.018 INFO  clienttrace - src: /127.0.0.1:45338, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741863_1039, duration(ns): 422628
20:33:26.018 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741863_1039, type=LAST_IN_PIPELINE terminating
20:33:26.019 INFO  FSNamesystem - BLOCK* blk_1073741863_1039 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/terminator
20:33:26.420 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.421 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
20:33:26.422 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.423 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.423 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam
20:33:26.423 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.424 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.425 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.425 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam done
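The audit entries above show how the single-file BAM is assembled: the header, part-r-00000 and a 28-byte terminator are written as separate files in the .parts directory, concatenated into "output" with HDFS concat, and then renamed to the final .bam. A hedged sketch of that concat-then-rename pattern (paths are illustrative, and HDFS concat imposes block-size and replication restrictions the real writer has to satisfy):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatPartsSketch {
        // Illustrative only: mirrors the audit sequence create(output) -> concat -> rename.
        public static void concatParts(String partsDir, String finalBam) throws Exception {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(URI.create(finalBam), conf);

            Path target = new Path(partsDir, "output");
            fs.create(target).close();                  // concat needs an existing target file

            Path[] sources = {
                    new Path(partsDir, "header"),       // serialized BAM header
                    new Path(partsDir, "part-r-00000"), // reducer output containing the reads
                    new Path(partsDir, "terminator")    // 28-byte BGZF end-of-file marker
            };
            fs.concat(target, sources);                 // metadata-only concatenation on HDFS
            fs.rename(target, new Path(finalBam));      // promote to the final .bam path
        }
    }
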
20:33:26.425 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.425 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi
20:33:26.426 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
20:33:26.426 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.427 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:26.428 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:26.429 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:26.430 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:26.431 INFO  StateChange - BLOCK* allocate blk_1073741864_1040, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi
20:33:26.431 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741864_1040 src: /127.0.0.1:45348 dest: /127.0.0.1:35765
20:33:26.433 INFO  clienttrace - src: /127.0.0.1:45348, dest: /127.0.0.1:35765, bytes: 204, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741864_1040, duration(ns): 450370
20:33:26.433 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741864_1040, type=LAST_IN_PIPELINE terminating
20:33:26.434 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.434 INFO  IndexFileMerger - Done merging .sbi files
20:33:26.434 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai
20:33:26.434 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
20:33:26.435 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.436 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:26.436 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:26.437 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:26.438 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:26.439 INFO  StateChange - BLOCK* allocate blk_1073741865_1041, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai
20:33:26.439 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741865_1041 src: /127.0.0.1:45356 dest: /127.0.0.1:35765
20:33:26.440 INFO  clienttrace - src: /127.0.0.1:45356, dest: /127.0.0.1:35765, bytes: 592, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741865_1041, duration(ns): 362152
20:33:26.440 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741865_1041, type=LAST_IN_PIPELINE terminating
20:33:26.441 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:26.441 INFO  IndexFileMerger - Done merging .bai files
20:33:26.442 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.parts	dst=null	perm=null	proto=rpc
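With the per-part .sbi and .bai files merged next to the final BAM and the .parts directory deleted, the remaining getfileinfo calls are effectively existence checks on the three outputs. A minimal sketch of such a check, assuming plain Hadoop FileSystem calls (not the test's actual verification code):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class OutputCheckSketch {
        // Illustrative: confirms the writer left a BAM plus its merged .sbi and .bai indexes.
        public static boolean outputsPresent(String bamPath) throws Exception {
            FileSystem fs = FileSystem.get(URI.create(bamPath), new Configuration());
            return fs.exists(new Path(bamPath))
                    && fs.exists(new Path(bamPath + ".sbi"))
                    && fs.exists(new Path(bamPath + ".bai"));
        }
    }
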
20:33:26.451 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.459 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.459 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.460 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.461 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:26.461 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.463 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.463 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.464 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.464 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.465 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.465 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.467 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:26.468 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:26.469 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:26.469 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.470 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.470 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.sbi	dst=null	perm=null	proto=rpc
20:33:26.471 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:26.471 INFO  MemoryStore - Block broadcast_104 stored as values in memory (estimated size 312.0 B, free 1917.0 MiB)
20:33:26.472 INFO  MemoryStore - Block broadcast_104_piece0 stored as bytes in memory (estimated size 231.0 B, free 1917.0 MiB)
20:33:26.472 INFO  BlockManagerInfo - Added broadcast_104_piece0 in memory on localhost:45281 (size: 231.0 B, free: 1919.4 MiB)
20:33:26.472 INFO  SparkContext - Created broadcast 104 from broadcast at BamSource.java:104
20:33:26.474 INFO  MemoryStore - Block broadcast_105 stored as values in memory (estimated size 297.9 KiB, free 1916.8 MiB)
20:33:26.480 INFO  MemoryStore - Block broadcast_105_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.7 MiB)
20:33:26.480 INFO  BlockManagerInfo - Added broadcast_105_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:26.480 INFO  SparkContext - Created broadcast 105 from newAPIHadoopFile at PathSplitSource.java:96
20:33:26.490 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.490 INFO  FileInputFormat - Total input files to process : 1
20:33:26.491 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.511 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:26.511 INFO  DAGScheduler - Got job 45 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:26.511 INFO  DAGScheduler - Final stage: ResultStage 61 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:26.511 INFO  DAGScheduler - Parents of final stage: List()
20:33:26.511 INFO  DAGScheduler - Missing parents: List()
20:33:26.511 INFO  DAGScheduler - Submitting ResultStage 61 (MapPartitionsRDD[240] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:26.518 INFO  MemoryStore - Block broadcast_106 stored as values in memory (estimated size 148.2 KiB, free 1916.6 MiB)
20:33:26.519 INFO  MemoryStore - Block broadcast_106_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1916.5 MiB)
20:33:26.519 INFO  BlockManagerInfo - Added broadcast_106_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.3 MiB)
20:33:26.519 INFO  SparkContext - Created broadcast 106 from broadcast at DAGScheduler.scala:1580
20:33:26.520 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 61 (MapPartitionsRDD[240] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:26.520 INFO  TaskSchedulerImpl - Adding task set 61.0 with 1 tasks resource profile 0
20:33:26.520 INFO  TaskSetManager - Starting task 0.0 in stage 61.0 (TID 99) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:26.521 INFO  Executor - Running task 0.0 in stage 61.0 (TID 99)
20:33:26.532 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam:0+236517
20:33:26.533 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.534 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.535 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.536 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.536 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.538 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:26.539 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:26.541 WARN  DFSUtil - Unexpected value for data transfer bytes=237139 duration=0
20:33:26.541 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:26.543 INFO  Executor - Finished task 0.0 in stage 61.0 (TID 99). 749470 bytes result sent to driver
20:33:26.545 INFO  TaskSetManager - Finished task 0.0 in stage 61.0 (TID 99) in 25 ms on localhost (executor driver) (1/1)
20:33:26.545 INFO  TaskSchedulerImpl - Removed TaskSet 61.0, whose tasks have all completed, from pool 
20:33:26.545 INFO  DAGScheduler - ResultStage 61 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.033 s
20:33:26.545 INFO  DAGScheduler - Job 45 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:26.545 INFO  TaskSchedulerImpl - Killing all running tasks in stage 61: Stage finished
20:33:26.545 INFO  DAGScheduler - Job 45 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.034753 s
20:33:26.560 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:26.561 INFO  DAGScheduler - Got job 46 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:26.561 INFO  DAGScheduler - Final stage: ResultStage 62 (count at ReadsSparkSinkUnitTest.java:185)
20:33:26.561 INFO  DAGScheduler - Parents of final stage: List()
20:33:26.561 INFO  DAGScheduler - Missing parents: List()
20:33:26.561 INFO  DAGScheduler - Submitting ResultStage 62 (MapPartitionsRDD[222] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:26.578 INFO  MemoryStore - Block broadcast_107 stored as values in memory (estimated size 426.1 KiB, free 1916.1 MiB)
20:33:26.580 INFO  MemoryStore - Block broadcast_107_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.9 MiB)
20:33:26.580 INFO  BlockManagerInfo - Added broadcast_107_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.1 MiB)
20:33:26.580 INFO  SparkContext - Created broadcast 107 from broadcast at DAGScheduler.scala:1580
20:33:26.580 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 62 (MapPartitionsRDD[222] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:26.580 INFO  TaskSchedulerImpl - Adding task set 62.0 with 1 tasks resource profile 0
20:33:26.581 INFO  TaskSetManager - Starting task 0.0 in stage 62.0 (TID 100) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7893 bytes) 
20:33:26.581 INFO  Executor - Running task 0.0 in stage 62.0 (TID 100)
20:33:26.612 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:26.621 INFO  Executor - Finished task 0.0 in stage 62.0 (TID 100). 989 bytes result sent to driver
20:33:26.622 INFO  TaskSetManager - Finished task 0.0 in stage 62.0 (TID 100) in 41 ms on localhost (executor driver) (1/1)
20:33:26.622 INFO  TaskSchedulerImpl - Removed TaskSet 62.0, whose tasks have all completed, from pool 
20:33:26.622 INFO  DAGScheduler - ResultStage 62 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.061 s
20:33:26.622 INFO  DAGScheduler - Job 46 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:26.622 INFO  TaskSchedulerImpl - Killing all running tasks in stage 62: Stage finished
20:33:26.623 INFO  DAGScheduler - Job 46 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.062301 s
20:33:26.626 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:26.626 INFO  DAGScheduler - Got job 47 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:26.626 INFO  DAGScheduler - Final stage: ResultStage 63 (count at ReadsSparkSinkUnitTest.java:185)
20:33:26.626 INFO  DAGScheduler - Parents of final stage: List()
20:33:26.627 INFO  DAGScheduler - Missing parents: List()
20:33:26.627 INFO  DAGScheduler - Submitting ResultStage 63 (MapPartitionsRDD[240] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:26.637 INFO  MemoryStore - Block broadcast_108 stored as values in memory (estimated size 148.1 KiB, free 1915.8 MiB)
20:33:26.638 INFO  MemoryStore - Block broadcast_108_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1915.7 MiB)
20:33:26.639 INFO  BlockManagerInfo - Added broadcast_108_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.1 MiB)
20:33:26.639 INFO  SparkContext - Created broadcast 108 from broadcast at DAGScheduler.scala:1580
20:33:26.639 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 63 (MapPartitionsRDD[240] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:26.639 INFO  TaskSchedulerImpl - Adding task set 63.0 with 1 tasks resource profile 0
20:33:26.640 INFO  TaskSetManager - Starting task 0.0 in stage 63.0 (TID 101) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:26.640 INFO  Executor - Running task 0.0 in stage 63.0 (TID 101)
20:33:26.652 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam:0+236517
20:33:26.653 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.654 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam	dst=null	perm=null	proto=rpc
20:33:26.655 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.655 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.656 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_370ee9aa-19f6-44d3-a90d-7e64c553d8c1.bam.bai	dst=null	perm=null	proto=rpc
20:33:26.657 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:26.659 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:26.660 WARN  DFSUtil - Unexpected value for data transfer bytes=237139 duration=0
20:33:26.663 INFO  Executor - Finished task 0.0 in stage 63.0 (TID 101). 989 bytes result sent to driver
20:33:26.663 INFO  TaskSetManager - Finished task 0.0 in stage 63.0 (TID 101) in 23 ms on localhost (executor driver) (1/1)
20:33:26.663 INFO  TaskSchedulerImpl - Removed TaskSet 63.0, whose tasks have all completed, from pool 
20:33:26.663 INFO  DAGScheduler - ResultStage 63 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.036 s
20:33:26.663 INFO  DAGScheduler - Job 47 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:26.663 INFO  TaskSchedulerImpl - Killing all running tasks in stage 63: Stage finished
20:33:26.664 INFO  DAGScheduler - Job 47 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.037476 s
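Jobs 46 and 47 run the same count at ReadsSparkSinkUnitTest.java:185, first against the original local BAM (stage 62) and then against the copy just written to HDFS (stage 63), which suggests the assertion is that the round trip preserves the number of reads. A hedged TestNG-style sketch of that comparison; the RDD parameters are placeholders, not the test's actual fields:

    import org.apache.spark.api.java.JavaRDD;
    import org.testng.Assert;

    public class RoundTripCountSketch {
        // Illustrative: compare read counts of the original input and the rewritten file.
        public static <T> void assertSameNumberOfReads(JavaRDD<T> originalReads, JavaRDD<T> rewrittenReads) {
            long expected = originalReads.count();   // count on the local BAM (job 46 above)
            long actual = rewrittenReads.count();    // count on the HDFS copy (job 47 above)
            Assert.assertEquals(actual, expected, "round-tripped file lost or gained reads");
        }
    }
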
20:33:26.669 INFO  MemoryStore - Block broadcast_109 stored as values in memory (estimated size 576.0 B, free 1915.7 MiB)
20:33:26.678 INFO  MemoryStore - Block broadcast_109_piece0 stored as bytes in memory (estimated size 228.0 B, free 1915.7 MiB)
20:33:26.678 INFO  BlockManagerInfo - Added broadcast_109_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.1 MiB)
20:33:26.678 INFO  SparkContext - Created broadcast 109 from broadcast at CramSource.java:114
20:33:26.679 INFO  BlockManagerInfo - Removed broadcast_100_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.1 MiB)
20:33:26.680 INFO  BlockManagerInfo - Removed broadcast_104_piece0 on localhost:45281 in memory (size: 231.0 B, free: 1919.1 MiB)
20:33:26.680 INFO  MemoryStore - Block broadcast_110 stored as values in memory (estimated size 297.9 KiB, free 1915.5 MiB)
20:33:26.680 INFO  BlockManagerInfo - Removed broadcast_105_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.1 MiB)
20:33:26.681 INFO  BlockManagerInfo - Removed broadcast_98_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:26.682 INFO  BlockManagerInfo - Removed broadcast_93_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.2 MiB)
20:33:26.682 INFO  BlockManagerInfo - Removed broadcast_99_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:26.683 INFO  BlockManagerInfo - Removed broadcast_106_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.3 MiB)
20:33:26.684 INFO  BlockManagerInfo - Removed broadcast_97_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.3 MiB)
20:33:26.684 INFO  BlockManagerInfo - Removed broadcast_103_piece0 on localhost:45281 in memory (size: 58.5 KiB, free: 1919.4 MiB)
20:33:26.685 INFO  BlockManagerInfo - Removed broadcast_107_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.5 MiB)
20:33:26.685 INFO  BlockManagerInfo - Removed broadcast_94_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:26.686 INFO  BlockManagerInfo - Removed broadcast_96_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:26.687 INFO  BlockManagerInfo - Removed broadcast_87_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.8 MiB)
20:33:26.688 INFO  BlockManagerInfo - Removed broadcast_108_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.8 MiB)
20:33:26.688 INFO  BlockManagerInfo - Removed broadcast_101_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.8 MiB)
20:33:26.689 INFO  BlockManagerInfo - Removed broadcast_102_piece0 on localhost:45281 in memory (size: 157.6 KiB, free: 1920.0 MiB)
20:33:26.691 INFO  MemoryStore - Block broadcast_110_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.7 MiB)
20:33:26.691 INFO  BlockManagerInfo - Added broadcast_110_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1920.0 MiB)
20:33:26.691 INFO  SparkContext - Created broadcast 110 from newAPIHadoopFile at PathSplitSource.java:96
20:33:26.708 INFO  MemoryStore - Block broadcast_111 stored as values in memory (estimated size 576.0 B, free 1919.7 MiB)
20:33:26.709 INFO  MemoryStore - Block broadcast_111_piece0 stored as bytes in memory (estimated size 228.0 B, free 1919.7 MiB)
20:33:26.709 INFO  BlockManagerInfo - Added broadcast_111_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1920.0 MiB)
20:33:26.709 INFO  SparkContext - Created broadcast 111 from broadcast at CramSource.java:114
20:33:26.710 INFO  MemoryStore - Block broadcast_112 stored as values in memory (estimated size 297.9 KiB, free 1919.4 MiB)
20:33:26.716 INFO  MemoryStore - Block broadcast_112_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:26.717 INFO  BlockManagerInfo - Added broadcast_112_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:26.717 INFO  SparkContext - Created broadcast 112 from newAPIHadoopFile at PathSplitSource.java:96
20:33:26.732 INFO  FileInputFormat - Total input files to process : 1
20:33:26.733 INFO  MemoryStore - Block broadcast_113 stored as values in memory (estimated size 6.0 KiB, free 1919.3 MiB)
20:33:26.734 INFO  MemoryStore - Block broadcast_113_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:26.734 INFO  BlockManagerInfo - Added broadcast_113_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:26.734 INFO  SparkContext - Created broadcast 113 from broadcast at ReadsSparkSink.java:133
20:33:26.735 INFO  MemoryStore - Block broadcast_114 stored as values in memory (estimated size 6.2 KiB, free 1919.3 MiB)
20:33:26.736 INFO  MemoryStore - Block broadcast_114_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:26.736 INFO  BlockManagerInfo - Added broadcast_114_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:26.736 INFO  SparkContext - Created broadcast 114 from broadcast at CramSink.java:76
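The broadcasts created at ReadsSparkSink.java:133 and CramSink.java:76 indicate that the header is shipped to executors once as a Spark broadcast rather than being captured in every task closure. A minimal sketch of that pattern, assuming an htsjdk SAMFileHeader (or any type the job's serializer can handle); this is not the sink's actual code:

    import htsjdk.samtools.SAMFileHeader;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.broadcast.Broadcast;

    public class HeaderBroadcastSketch {
        // Illustrative: broadcast the header once so every write task can reuse it;
        // whether the header serializes depends on the job's serializer configuration.
        public static Broadcast<SAMFileHeader> broadcastHeader(JavaSparkContext ctx, SAMFileHeader header) {
            return ctx.broadcast(header);
        }
    }
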
20:33:26.741 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts	dst=null	perm=null	proto=rpc
20:33:26.742 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:26.742 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:26.742 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:26.743 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
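The three committer messages above map onto standard Hadoop configuration knobs. A sketch of the corresponding settings: the algorithm-version key is the documented mapreduce.fileoutputcommitter.algorithm.version, while the two cleanup keys are given here as assumptions based on the FileOutputCommitter constants and should be checked against the Hadoop version in use:

    import org.apache.hadoop.conf.Configuration;

    public class CommitterConfigSketch {
        // Illustrative: settings behind the FileOutputCommitter messages above.
        public static Configuration committerConf() {
            Configuration conf = new Configuration();
            // "File Output Committer Algorithm version is 1"
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
            // "skip cleanup _temporary ...: false, ignore cleanup failures: false"
            // (key names are assumptions, not verified against this Hadoop release)
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup.skipped", false);
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup-failures.ignored", false);
            return conf;
        }
    }
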
20:33:26.749 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:26.749 INFO  DAGScheduler - Registering RDD 252 (mapToPair at SparkUtils.java:161) as input to shuffle 14
20:33:26.750 INFO  DAGScheduler - Got job 48 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:26.750 INFO  DAGScheduler - Final stage: ResultStage 65 (runJob at SparkHadoopWriter.scala:83)
20:33:26.750 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 64)
20:33:26.750 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 64)
20:33:26.750 INFO  DAGScheduler - Submitting ShuffleMapStage 64 (MapPartitionsRDD[252] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:26.762 INFO  MemoryStore - Block broadcast_115 stored as values in memory (estimated size 292.8 KiB, free 1919.0 MiB)
20:33:26.763 INFO  MemoryStore - Block broadcast_115_piece0 stored as bytes in memory (estimated size 107.3 KiB, free 1918.9 MiB)
20:33:26.763 INFO  BlockManagerInfo - Added broadcast_115_piece0 in memory on localhost:45281 (size: 107.3 KiB, free: 1919.8 MiB)
20:33:26.763 INFO  SparkContext - Created broadcast 115 from broadcast at DAGScheduler.scala:1580
20:33:26.764 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 64 (MapPartitionsRDD[252] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:26.764 INFO  TaskSchedulerImpl - Adding task set 64.0 with 1 tasks resource profile 0
20:33:26.764 INFO  TaskSetManager - Starting task 0.0 in stage 64.0 (TID 102) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7869 bytes) 
20:33:26.765 INFO  Executor - Running task 0.0 in stage 64.0 (TID 102)
20:33:26.787 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:26.810 INFO  Executor - Finished task 0.0 in stage 64.0 (TID 102). 1148 bytes result sent to driver
20:33:26.811 INFO  TaskSetManager - Finished task 0.0 in stage 64.0 (TID 102) in 46 ms on localhost (executor driver) (1/1)
20:33:26.811 INFO  TaskSchedulerImpl - Removed TaskSet 64.0, whose tasks have all completed, from pool 
20:33:26.811 INFO  DAGScheduler - ShuffleMapStage 64 (mapToPair at SparkUtils.java:161) finished in 0.061 s
20:33:26.811 INFO  DAGScheduler - looking for newly runnable stages
20:33:26.811 INFO  DAGScheduler - running: HashSet()
20:33:26.811 INFO  DAGScheduler - waiting: HashSet(ResultStage 65)
20:33:26.811 INFO  DAGScheduler - failed: HashSet()
20:33:26.811 INFO  DAGScheduler - Submitting ResultStage 65 (MapPartitionsRDD[257] at mapToPair at CramSink.java:89), which has no missing parents
20:33:26.818 INFO  MemoryStore - Block broadcast_116 stored as values in memory (estimated size 153.3 KiB, free 1918.8 MiB)
20:33:26.819 INFO  MemoryStore - Block broadcast_116_piece0 stored as bytes in memory (estimated size 58.1 KiB, free 1918.7 MiB)
20:33:26.819 INFO  BlockManagerInfo - Added broadcast_116_piece0 in memory on localhost:45281 (size: 58.1 KiB, free: 1919.7 MiB)
20:33:26.819 INFO  SparkContext - Created broadcast 116 from broadcast at DAGScheduler.scala:1580
20:33:26.819 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 65 (MapPartitionsRDD[257] at mapToPair at CramSink.java:89) (first 15 tasks are for partitions Vector(0))
20:33:26.820 INFO  TaskSchedulerImpl - Adding task set 65.0 with 1 tasks resource profile 0
20:33:26.820 INFO  TaskSetManager - Starting task 0.0 in stage 65.0 (TID 103) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:26.821 INFO  Executor - Running task 0.0 in stage 65.0 (TID 103)
20:33:26.828 INFO  ShuffleBlockFetcherIterator - Getting 1 (82.3 KiB) non-empty blocks including 1 (82.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:26.828 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:26.838 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:26.838 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:26.838 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:26.838 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:26.838 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:26.838 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:26.842 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:26.956 INFO  StateChange - BLOCK* allocate blk_1073741866_1042, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0/part-r-00000
20:33:26.957 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741866_1042 src: /127.0.0.1:45370 dest: /127.0.0.1:35765
20:33:26.958 INFO  clienttrace - src: /127.0.0.1:45370, dest: /127.0.0.1:35765, bytes: 42659, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741866_1042, duration(ns): 553623
20:33:26.958 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741866_1042, type=LAST_IN_PIPELINE terminating
20:33:26.959 INFO  FSNamesystem - BLOCK* blk_1073741866_1042 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0/part-r-00000
20:33:27.360 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:27.361 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0	dst=null	perm=null	proto=rpc
20:33:27.362 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0	dst=null	perm=null	proto=rpc
20:33:27.362 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/task_202507152033264077193816748262554_0257_r_000000	dst=null	perm=null	proto=rpc
20:33:27.363 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/_temporary/attempt_202507152033264077193816748262554_0257_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/task_202507152033264077193816748262554_0257_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:27.363 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033264077193816748262554_0257_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/task_202507152033264077193816748262554_0257_r_000000
20:33:27.363 INFO  SparkHadoopMapRedUtil - attempt_202507152033264077193816748262554_0257_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:27.364 INFO  Executor - Finished task 0.0 in stage 65.0 (TID 103). 1858 bytes result sent to driver
20:33:27.365 INFO  TaskSetManager - Finished task 0.0 in stage 65.0 (TID 103) in 545 ms on localhost (executor driver) (1/1)
20:33:27.365 INFO  TaskSchedulerImpl - Removed TaskSet 65.0, whose tasks have all completed, from pool 
20:33:27.365 INFO  DAGScheduler - ResultStage 65 (runJob at SparkHadoopWriter.scala:83) finished in 0.553 s
20:33:27.365 INFO  DAGScheduler - Job 48 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:27.365 INFO  TaskSchedulerImpl - Killing all running tasks in stage 65: Stage finished
20:33:27.365 INFO  DAGScheduler - Job 48 finished: runJob at SparkHadoopWriter.scala:83, took 0.616594 s
20:33:27.366 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033264077193816748262554_0257.
20:33:27.366 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:27.367 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts	dst=null	perm=null	proto=rpc
20:33:27.367 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/task_202507152033264077193816748262554_0257_r_000000	dst=null	perm=null	proto=rpc
20:33:27.368 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:27.369 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary/0/task_202507152033264077193816748262554_0257_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.369 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:27.370 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.371 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:27.372 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/.spark-staging-257	dst=null	perm=null	proto=rpc
20:33:27.372 INFO  SparkHadoopWriter - Write Job job_202507152033264077193816748262554_0257 committed. Elapsed time: 5 ms.
20:33:27.372 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.375 INFO  StateChange - BLOCK* allocate blk_1073741867_1043, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/header
20:33:27.376 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741867_1043 src: /127.0.0.1:55328 dest: /127.0.0.1:35765
20:33:27.377 INFO  clienttrace - src: /127.0.0.1:55328, dest: /127.0.0.1:35765, bytes: 1016, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741867_1043, duration(ns): 584256
20:33:27.377 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741867_1043, type=LAST_IN_PIPELINE terminating
20:33:27.378 INFO  FSNamesystem - BLOCK* blk_1073741867_1043 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/header
20:33:27.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741860_1036 replica FinalizedReplica, blk_1073741860_1036, FINALIZED
  getNumBytes()     = 204
  getBytesOnDisk()  = 204
  getVisibleLength()= 204
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741860 for deletion
20:33:27.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741861_1037 replica FinalizedReplica, blk_1073741861_1037, FINALIZED
  getNumBytes()     = 592
  getBytesOnDisk()  = 592
  getVisibleLength()= 592
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741861 for deletion
20:33:27.399 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741854_1030 replica FinalizedReplica, blk_1073741854_1030, FINALIZED
  getNumBytes()     = 5472
  getBytesOnDisk()  = 5472
  getVisibleLength()= 5472
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741854 for deletion
20:33:27.400 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741860_1036 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741860
20:33:27.400 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741861_1037 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741861
20:33:27.400 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741854_1030 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741854
20:33:27.779 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:27.781 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.782 INFO  StateChange - BLOCK* allocate blk_1073741868_1044, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/terminator
20:33:27.783 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741868_1044 src: /127.0.0.1:55344 dest: /127.0.0.1:35765
20:33:27.784 INFO  clienttrace - src: /127.0.0.1:55344, dest: /127.0.0.1:35765, bytes: 38, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741868_1044, duration(ns): 461427
20:33:27.784 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741868_1044, type=LAST_IN_PIPELINE terminating
20:33:27.785 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:27.786 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts	dst=null	perm=null	proto=rpc
20:33:27.787 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.788 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:27.788 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram
20:33:27.788 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/header, /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.789 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.789 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:27.790 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram done
20:33:27.790 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.parts	dst=null	perm=null	proto=rpc
20:33:27.791 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.791 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.791 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.792 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.793 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.crai	dst=null	perm=null	proto=rpc
20:33:27.793 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.crai	dst=null	perm=null	proto=rpc
20:33:27.795 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:27.796 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:27.796 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:27.797 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.797 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.798 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.crai	dst=null	perm=null	proto=rpc
20:33:27.798 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.crai	dst=null	perm=null	proto=rpc
20:33:27.799 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.799 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.800 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:27.801 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:27.801 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:27.802 INFO  MemoryStore - Block broadcast_117 stored as values in memory (estimated size 528.0 B, free 1918.7 MiB)
20:33:27.802 INFO  MemoryStore - Block broadcast_117_piece0 stored as bytes in memory (estimated size 187.0 B, free 1918.7 MiB)
20:33:27.803 INFO  BlockManagerInfo - Added broadcast_117_piece0 in memory on localhost:45281 (size: 187.0 B, free: 1919.7 MiB)
20:33:27.803 INFO  SparkContext - Created broadcast 117 from broadcast at CramSource.java:114
20:33:27.804 INFO  MemoryStore - Block broadcast_118 stored as values in memory (estimated size 297.9 KiB, free 1918.4 MiB)
20:33:27.812 INFO  MemoryStore - Block broadcast_118_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:27.812 INFO  BlockManagerInfo - Added broadcast_118_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:27.812 INFO  SparkContext - Created broadcast 118 from newAPIHadoopFile at PathSplitSource.java:96
20:33:27.828 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.829 INFO  FileInputFormat - Total input files to process : 1
20:33:27.829 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
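The reads just written are then registered as Hadoop input (broadcasts 117 and 118, newAPIHadoopFile at PathSplitSource.java:96, one input file to process). The generic Spark pattern behind those lines is sketched below using Hadoop's TextInputFormat as a stand-in; GATK's actual CRAM input format and the real HDFS path are not shown in this log, so treat the classes and path here as illustrative only.

```java
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class NewApiHadoopFileSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("newAPIHadoopFile-sketch");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Hypothetical path; the test reads back the .cram it just wrote to the mini-HDFS cluster.
            JavaPairRDD<LongWritable, Text> rdd = jsc.newAPIHadoopFile(
                    "hdfs://localhost:8020/tmp/example.txt",
                    TextInputFormat.class, LongWritable.class, Text.class,
                    jsc.hadoopConfiguration());
            System.out.println("records: " + rdd.count());
        }
    }
}
```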
20:33:27.859 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:27.859 INFO  DAGScheduler - Got job 49 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:27.859 INFO  DAGScheduler - Final stage: ResultStage 66 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:27.859 INFO  DAGScheduler - Parents of final stage: List()
20:33:27.859 INFO  DAGScheduler - Missing parents: List()
20:33:27.860 INFO  DAGScheduler - Submitting ResultStage 66 (MapPartitionsRDD[263] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:27.871 INFO  MemoryStore - Block broadcast_119 stored as values in memory (estimated size 286.8 KiB, free 1918.1 MiB)
20:33:27.872 INFO  MemoryStore - Block broadcast_119_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.0 MiB)
20:33:27.872 INFO  BlockManagerInfo - Added broadcast_119_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.6 MiB)
20:33:27.872 INFO  SparkContext - Created broadcast 119 from broadcast at DAGScheduler.scala:1580
20:33:27.873 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 66 (MapPartitionsRDD[263] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:27.873 INFO  TaskSchedulerImpl - Adding task set 66.0 with 1 tasks resource profile 0
20:33:27.873 INFO  TaskSetManager - Starting task 0.0 in stage 66.0 (TID 104) (localhost, executor driver, partition 0, ANY, 7853 bytes) 
20:33:27.874 INFO  Executor - Running task 0.0 in stage 66.0 (TID 104)
20:33:27.895 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram:0+43713
20:33:27.896 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.897 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:27.898 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.crai	dst=null	perm=null	proto=rpc
20:33:27.898 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.crai	dst=null	perm=null	proto=rpc
20:33:27.900 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:27.901 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:27.901 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:27.943 INFO  Executor - Finished task 0.0 in stage 66.0 (TID 104). 154101 bytes result sent to driver
20:33:27.944 INFO  TaskSetManager - Finished task 0.0 in stage 66.0 (TID 104) in 70 ms on localhost (executor driver) (1/1)
20:33:27.944 INFO  TaskSchedulerImpl - Removed TaskSet 66.0, whose tasks have all completed, from pool 
20:33:27.944 INFO  DAGScheduler - ResultStage 66 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.084 s
20:33:27.944 INFO  DAGScheduler - Job 49 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:27.944 INFO  TaskSchedulerImpl - Killing all running tasks in stage 66: Stage finished
20:33:27.944 INFO  DAGScheduler - Job 49 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.085206 s
20:33:27.950 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:27.950 INFO  DAGScheduler - Got job 50 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:27.950 INFO  DAGScheduler - Final stage: ResultStage 67 (count at ReadsSparkSinkUnitTest.java:185)
20:33:27.950 INFO  DAGScheduler - Parents of final stage: List()
20:33:27.950 INFO  DAGScheduler - Missing parents: List()
20:33:27.950 INFO  DAGScheduler - Submitting ResultStage 67 (MapPartitionsRDD[246] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:27.962 INFO  MemoryStore - Block broadcast_120 stored as values in memory (estimated size 286.8 KiB, free 1917.7 MiB)
20:33:27.963 INFO  MemoryStore - Block broadcast_120_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.6 MiB)
20:33:27.963 INFO  BlockManagerInfo - Added broadcast_120_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.5 MiB)
20:33:27.963 INFO  SparkContext - Created broadcast 120 from broadcast at DAGScheduler.scala:1580
20:33:27.964 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 67 (MapPartitionsRDD[246] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:27.964 INFO  TaskSchedulerImpl - Adding task set 67.0 with 1 tasks resource profile 0
20:33:27.964 INFO  TaskSetManager - Starting task 0.0 in stage 67.0 (TID 105) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7880 bytes) 
20:33:27.965 INFO  Executor - Running task 0.0 in stage 67.0 (TID 105)
20:33:27.986 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:27.994 INFO  Executor - Finished task 0.0 in stage 67.0 (TID 105). 989 bytes result sent to driver
20:33:27.995 INFO  TaskSetManager - Finished task 0.0 in stage 67.0 (TID 105) in 31 ms on localhost (executor driver) (1/1)
20:33:27.995 INFO  TaskSchedulerImpl - Removed TaskSet 67.0, whose tasks have all completed, from pool 
20:33:27.995 INFO  DAGScheduler - ResultStage 67 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.045 s
20:33:27.995 INFO  DAGScheduler - Job 50 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:27.995 INFO  TaskSchedulerImpl - Killing all running tasks in stage 67: Stage finished
20:33:27.995 INFO  DAGScheduler - Job 50 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.045638 s
20:33:28.000 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:28.000 INFO  DAGScheduler - Got job 51 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:28.000 INFO  DAGScheduler - Final stage: ResultStage 68 (count at ReadsSparkSinkUnitTest.java:185)
20:33:28.000 INFO  DAGScheduler - Parents of final stage: List()
20:33:28.000 INFO  DAGScheduler - Missing parents: List()
20:33:28.000 INFO  DAGScheduler - Submitting ResultStage 68 (MapPartitionsRDD[263] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:28.012 INFO  MemoryStore - Block broadcast_121 stored as values in memory (estimated size 286.8 KiB, free 1917.3 MiB)
20:33:28.019 INFO  BlockManagerInfo - Removed broadcast_120_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.6 MiB)
20:33:28.019 INFO  MemoryStore - Block broadcast_121_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.6 MiB)
20:33:28.019 INFO  BlockManagerInfo - Added broadcast_121_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.5 MiB)
20:33:28.020 INFO  SparkContext - Created broadcast 121 from broadcast at DAGScheduler.scala:1580
20:33:28.020 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 68 (MapPartitionsRDD[263] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:28.020 INFO  TaskSchedulerImpl - Adding task set 68.0 with 1 tasks resource profile 0
20:33:28.020 INFO  BlockManagerInfo - Removed broadcast_111_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.5 MiB)
20:33:28.021 INFO  TaskSetManager - Starting task 0.0 in stage 68.0 (TID 106) (localhost, executor driver, partition 0, ANY, 7853 bytes) 
20:33:28.021 INFO  BlockManagerInfo - Removed broadcast_119_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.6 MiB)
20:33:28.022 INFO  Executor - Running task 0.0 in stage 68.0 (TID 106)
20:33:28.022 INFO  BlockManagerInfo - Removed broadcast_116_piece0 on localhost:45281 in memory (size: 58.1 KiB, free: 1919.6 MiB)
20:33:28.023 INFO  BlockManagerInfo - Removed broadcast_115_piece0 on localhost:45281 in memory (size: 107.3 KiB, free: 1919.7 MiB)
20:33:28.023 INFO  BlockManagerInfo - Removed broadcast_114_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.7 MiB)
20:33:28.024 INFO  BlockManagerInfo - Removed broadcast_112_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:28.025 INFO  BlockManagerInfo - Removed broadcast_113_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.8 MiB)
20:33:28.044 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram:0+43713
20:33:28.045 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:28.045 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram	dst=null	perm=null	proto=rpc
20:33:28.046 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.cram.crai	dst=null	perm=null	proto=rpc
20:33:28.047 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1f05716c-dc36-4b4a-a4f8-f5615beaa0b0.crai	dst=null	perm=null	proto=rpc
20:33:28.049 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:28.049 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:28.050 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:28.068 INFO  Executor - Finished task 0.0 in stage 68.0 (TID 106). 989 bytes result sent to driver
20:33:28.069 INFO  TaskSetManager - Finished task 0.0 in stage 68.0 (TID 106) in 48 ms on localhost (executor driver) (1/1)
20:33:28.069 INFO  TaskSchedulerImpl - Removed TaskSet 68.0, whose tasks have all completed, from pool 
20:33:28.069 INFO  DAGScheduler - ResultStage 68 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.068 s
20:33:28.069 INFO  DAGScheduler - Job 51 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.069 INFO  TaskSchedulerImpl - Killing all running tasks in stage 68: Stage finished
20:33:28.069 INFO  DAGScheduler - Job 51 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.069421 s
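Jobs 49-51 form the round-trip check for this CRAM case: the written reads are collected (ReadsSparkSinkUnitTest.java:182), then the original input and the written file are each counted (ReadsSparkSinkUnitTest.java:185). A hedged sketch of that comparison follows; the method and variable names are illustrative and do not reproduce the test's actual code.

```java
import java.util.List;
import org.apache.spark.api.java.JavaRDD;

// Illustrative round-trip check, assuming two JavaRDD<String> handles: one over the
// original input and one over the file that was just written and read back.
final class RoundTripCheckSketch {
    static void check(JavaRDD<String> original, JavaRDD<String> written) {
        List<String> writtenRecords = written.collect();   // one job: collect on the output
        long originalCount = original.count();             // one job: count on the source
        long writtenCount  = written.count();              // one job: count on the output
        if (originalCount != writtenCount || writtenRecords.size() != writtenCount) {
            throw new AssertionError("round-trip lost records: "
                    + originalCount + " vs " + writtenCount);
        }
    }
}
```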
20:33:28.074 INFO  MemoryStore - Block broadcast_122 stored as values in memory (estimated size 297.9 KiB, free 1918.6 MiB)
20:33:28.085 INFO  MemoryStore - Block broadcast_122_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.6 MiB)
20:33:28.085 INFO  BlockManagerInfo - Added broadcast_122_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:28.085 INFO  SparkContext - Created broadcast 122 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.113 INFO  MemoryStore - Block broadcast_123 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:28.119 INFO  MemoryStore - Block broadcast_123_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.3 MiB)
20:33:28.120 INFO  BlockManagerInfo - Added broadcast_123_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:28.120 INFO  SparkContext - Created broadcast 123 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.141 INFO  FileInputFormat - Total input files to process : 1
20:33:28.143 INFO  MemoryStore - Block broadcast_124 stored as values in memory (estimated size 160.7 KiB, free 1918.1 MiB)
20:33:28.144 INFO  MemoryStore - Block broadcast_124_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:28.144 INFO  BlockManagerInfo - Added broadcast_124_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:28.144 INFO  SparkContext - Created broadcast 124 from broadcast at ReadsSparkSink.java:133
20:33:28.155 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts	dst=null	perm=null	proto=rpc
20:33:28.156 INFO  deprecation - mapred.output.dir is deprecated. Instead, use mapreduce.output.fileoutputformat.outputdir
20:33:28.157 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:28.157 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.157 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.158 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
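The lines above configure the Hadoop output committer for the .sam write: the deprecated mapred.output.dir key is mapped to mapreduce.output.fileoutputformat.outputdir, and FileOutputCommitter algorithm version 1 is selected with _temporary cleanup enabled. A small, hypothetical sketch of how those configuration keys behave on a Hadoop Configuration; the output path is a placeholder.

```java
import org.apache.hadoop.conf.Configuration;

public class CommitterConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Setting the current key also satisfies reads of the deprecated alias.
        conf.set("mapreduce.output.fileoutputformat.outputdir",
                 "hdfs://localhost:8020/tmp/sketch-output");          // placeholder path
        // Algorithm v1 commits each task by rename into _temporary/0/task_* and promotes
        // the part files to the final directory only at job commit (the behaviour logged above).
        conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
        System.out.println(conf.get("mapred.output.dir"));            // resolved via the deprecation mapping
    }
}
```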
20:33:28.166 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:28.167 INFO  DAGScheduler - Registering RDD 277 (mapToPair at SparkUtils.java:161) as input to shuffle 15
20:33:28.167 INFO  DAGScheduler - Got job 52 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:28.167 INFO  DAGScheduler - Final stage: ResultStage 70 (runJob at SparkHadoopWriter.scala:83)
20:33:28.167 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 69)
20:33:28.167 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 69)
20:33:28.167 INFO  DAGScheduler - Submitting ShuffleMapStage 69 (MapPartitionsRDD[277] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:28.186 INFO  MemoryStore - Block broadcast_125 stored as values in memory (estimated size 520.4 KiB, free 1917.6 MiB)
20:33:28.188 INFO  MemoryStore - Block broadcast_125_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.4 MiB)
20:33:28.188 INFO  BlockManagerInfo - Added broadcast_125_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.5 MiB)
20:33:28.189 INFO  SparkContext - Created broadcast 125 from broadcast at DAGScheduler.scala:1580
20:33:28.189 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 69 (MapPartitionsRDD[277] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:28.189 INFO  TaskSchedulerImpl - Adding task set 69.0 with 1 tasks resource profile 0
20:33:28.190 INFO  TaskSetManager - Starting task 0.0 in stage 69.0 (TID 107) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:28.190 INFO  Executor - Running task 0.0 in stage 69.0 (TID 107)
20:33:28.236 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:28.257 INFO  Executor - Finished task 0.0 in stage 69.0 (TID 107). 1148 bytes result sent to driver
20:33:28.258 INFO  TaskSetManager - Finished task 0.0 in stage 69.0 (TID 107) in 68 ms on localhost (executor driver) (1/1)
20:33:28.258 INFO  TaskSchedulerImpl - Removed TaskSet 69.0, whose tasks have all completed, from pool 
20:33:28.259 INFO  DAGScheduler - ShuffleMapStage 69 (mapToPair at SparkUtils.java:161) finished in 0.090 s
20:33:28.259 INFO  DAGScheduler - looking for newly runnable stages
20:33:28.259 INFO  DAGScheduler - running: HashSet()
20:33:28.259 INFO  DAGScheduler - waiting: HashSet(ResultStage 70)
20:33:28.259 INFO  DAGScheduler - failed: HashSet()
20:33:28.259 INFO  DAGScheduler - Submitting ResultStage 70 (MapPartitionsRDD[283] at saveAsTextFile at SamSink.java:65), which has no missing parents
20:33:28.266 INFO  MemoryStore - Block broadcast_126 stored as values in memory (estimated size 241.1 KiB, free 1917.2 MiB)
20:33:28.267 INFO  MemoryStore - Block broadcast_126_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1917.1 MiB)
20:33:28.268 INFO  BlockManagerInfo - Added broadcast_126_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.5 MiB)
20:33:28.268 INFO  SparkContext - Created broadcast 126 from broadcast at DAGScheduler.scala:1580
20:33:28.268 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 70 (MapPartitionsRDD[283] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
20:33:28.268 INFO  TaskSchedulerImpl - Adding task set 70.0 with 1 tasks resource profile 0
20:33:28.269 INFO  TaskSetManager - Starting task 0.0 in stage 70.0 (TID 108) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:28.269 INFO  Executor - Running task 0.0 in stage 70.0 (TID 108)
20:33:28.275 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:28.275 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:28.289 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:28.289 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.289 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.291 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0/part-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.294 INFO  StateChange - BLOCK* allocate blk_1073741869_1045, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0/part-00000
20:33:28.295 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741869_1045 src: /127.0.0.1:55356 dest: /127.0.0.1:35765
20:33:28.302 INFO  clienttrace - src: /127.0.0.1:55356, dest: /127.0.0.1:35765, bytes: 761729, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741869_1045, duration(ns): 6101842
20:33:28.302 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741869_1045, type=LAST_IN_PIPELINE terminating
20:33:28.303 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0/part-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:28.304 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0	dst=null	perm=null	proto=rpc
20:33:28.305 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0	dst=null	perm=null	proto=rpc
20:33:28.306 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/task_202507152033284701790765270470203_0283_m_000000	dst=null	perm=null	proto=rpc
20:33:28.306 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/_temporary/attempt_202507152033284701790765270470203_0283_m_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/task_202507152033284701790765270470203_0283_m_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:28.307 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033284701790765270470203_0283_m_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/task_202507152033284701790765270470203_0283_m_000000
20:33:28.307 INFO  SparkHadoopMapRedUtil - attempt_202507152033284701790765270470203_0283_m_000000_0: Committed. Elapsed time: 1 ms.
20:33:28.307 INFO  Executor - Finished task 0.0 in stage 70.0 (TID 108). 1858 bytes result sent to driver
20:33:28.308 INFO  TaskSetManager - Finished task 0.0 in stage 70.0 (TID 108) in 39 ms on localhost (executor driver) (1/1)
20:33:28.308 INFO  TaskSchedulerImpl - Removed TaskSet 70.0, whose tasks have all completed, from pool 
20:33:28.308 INFO  DAGScheduler - ResultStage 70 (runJob at SparkHadoopWriter.scala:83) finished in 0.049 s
20:33:28.308 INFO  DAGScheduler - Job 52 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.308 INFO  TaskSchedulerImpl - Killing all running tasks in stage 70: Stage finished
20:33:28.309 INFO  DAGScheduler - Job 52 finished: runJob at SparkHadoopWriter.scala:83, took 0.142357 s
20:33:28.309 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033284701790765270470203_0283.
20:33:28.309 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:28.310 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts	dst=null	perm=null	proto=rpc
20:33:28.310 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/task_202507152033284701790765270470203_0283_m_000000	dst=null	perm=null	proto=rpc
20:33:28.311 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/part-00000	dst=null	perm=null	proto=rpc
20:33:28.311 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary/0/task_202507152033284701790765270470203_0283_m_000000/part-00000	dst=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/part-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.312 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:28.313 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.314 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:28.314 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/.spark-staging-283	dst=null	perm=null	proto=rpc
20:33:28.314 INFO  SparkHadoopWriter - Write Job job_202507152033284701790765270470203_0283 committed. Elapsed time: 5 ms.
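Stages 69-70 write the .sam body: the reads are shuffled (mapToPair at SparkUtils.java:161), serialized as text (saveAsTextFile at SamSink.java:65), and committed through FileOutputCommitter v1, with the task attempt renamed into _temporary/0/task_*, then promoted to part-00000 and marked with _SUCCESS at job commit. The same write-and-commit path can be exercised with a plain RDD, as in the sketch below; the local output path is a placeholder.

```java
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class SaveAsTextFileSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[1]").setAppName("saveAsTextFile-sketch");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Writing any RDD as text goes through SparkHadoopWriter + FileOutputCommitter:
            // tasks write under <dir>/_temporary/0/_temporary/attempt_*, task commit renames
            // into _temporary/0/task_*, and job commit promotes part files and adds _SUCCESS.
            jsc.parallelize(Arrays.asList("r1", "r2", "r3"), 1)
               .saveAsTextFile("/tmp/saveAsTextFile-sketch-output");   // hypothetical local path
        }
    }
}
```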
20:33:28.315 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.318 INFO  StateChange - BLOCK* allocate blk_1073741870_1046, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/header
20:33:28.319 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741870_1046 src: /127.0.0.1:55358 dest: /127.0.0.1:35765
20:33:28.320 INFO  clienttrace - src: /127.0.0.1:55358, dest: /127.0.0.1:35765, bytes: 85829, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741870_1046, duration(ns): 711310
20:33:28.320 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741870_1046, type=LAST_IN_PIPELINE terminating
20:33:28.321 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:28.322 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts	dst=null	perm=null	proto=rpc
20:33:28.323 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.323 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:28.324 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam
20:33:28.324 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/header, /user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/part-00000]	dst=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.324 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.325 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:28.325 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam done
20:33:28.326 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam.parts	dst=null	perm=null	proto=rpc
20:33:28.326 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.327 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.327 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.328 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
WARNING	2025-07-15 20:33:28	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:28.330 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:28.331 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.332 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.332 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
WARNING	2025-07-15 20:33:28	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
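The two SamReaderFactory warnings appear presumably because the freshly written .sam is handed to htsjdk as a stream with no recognizable binary magic, so the factory falls back to assuming SAM. A minimal htsjdk read-back of a SAM file, assuming a hypothetical local path rather than the HDFS stream used by the test:

```java
import java.io.File;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class SamReadBackSketch {
    public static void main(String[] args) throws Exception {
        // Opening a plain-text SAM; with a raw stream and no detectable BAM/CRAM magic,
        // SamReaderFactory logs the "assuming SAM format" warning seen above.
        try (SamReader reader = SamReaderFactory.makeDefault()
                .open(new File("/tmp/example.sam"))) {   // hypothetical path
            long n = 0;
            for (SAMRecord r : reader) { n++; }
            System.out.println("records: " + n);
        }
    }
}
```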
20:33:28.334 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:28.335 WARN  DFSUtil - Unexpected value for data transfer bytes=767681 duration=0
20:33:28.337 INFO  MemoryStore - Block broadcast_127 stored as values in memory (estimated size 160.7 KiB, free 1917.0 MiB)
20:33:28.338 INFO  MemoryStore - Block broadcast_127_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.0 MiB)
20:33:28.338 INFO  BlockManagerInfo - Added broadcast_127_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:28.338 INFO  SparkContext - Created broadcast 127 from broadcast at SamSource.java:78
20:33:28.340 INFO  MemoryStore - Block broadcast_128 stored as values in memory (estimated size 297.9 KiB, free 1916.7 MiB)
20:33:28.351 INFO  MemoryStore - Block broadcast_128_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:28.351 INFO  BlockManagerInfo - Added broadcast_128_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:28.351 INFO  SparkContext - Created broadcast 128 from newAPIHadoopFile at SamSource.java:108
20:33:28.360 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.360 INFO  FileInputFormat - Total input files to process : 1
20:33:28.361 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.372 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:28.373 INFO  DAGScheduler - Got job 53 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:28.373 INFO  DAGScheduler - Final stage: ResultStage 71 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:28.373 INFO  DAGScheduler - Parents of final stage: List()
20:33:28.373 INFO  DAGScheduler - Missing parents: List()
20:33:28.373 INFO  DAGScheduler - Submitting ResultStage 71 (MapPartitionsRDD[288] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:28.374 INFO  MemoryStore - Block broadcast_129 stored as values in memory (estimated size 7.5 KiB, free 1916.6 MiB)
20:33:28.374 INFO  MemoryStore - Block broadcast_129_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1916.6 MiB)
20:33:28.374 INFO  BlockManagerInfo - Added broadcast_129_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.4 MiB)
20:33:28.375 INFO  SparkContext - Created broadcast 129 from broadcast at DAGScheduler.scala:1580
20:33:28.375 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 71 (MapPartitionsRDD[288] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:28.375 INFO  TaskSchedulerImpl - Adding task set 71.0 with 1 tasks resource profile 0
20:33:28.375 INFO  TaskSetManager - Starting task 0.0 in stage 71.0 (TID 109) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:28.376 INFO  Executor - Running task 0.0 in stage 71.0 (TID 109)
20:33:28.378 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam:0+847558
20:33:28.383 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.390 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:28.438 INFO  Executor - Finished task 0.0 in stage 71.0 (TID 109). 651526 bytes result sent to driver
20:33:28.441 INFO  TaskSetManager - Finished task 0.0 in stage 71.0 (TID 109) in 66 ms on localhost (executor driver) (1/1)
20:33:28.441 INFO  TaskSchedulerImpl - Removed TaskSet 71.0, whose tasks have all completed, from pool 
20:33:28.441 INFO  DAGScheduler - ResultStage 71 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.068 s
20:33:28.441 INFO  DAGScheduler - Job 53 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.441 INFO  TaskSchedulerImpl - Killing all running tasks in stage 71: Stage finished
20:33:28.441 INFO  DAGScheduler - Job 53 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.068734 s
20:33:28.451 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:28.451 INFO  DAGScheduler - Got job 54 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:28.451 INFO  DAGScheduler - Final stage: ResultStage 72 (count at ReadsSparkSinkUnitTest.java:185)
20:33:28.451 INFO  DAGScheduler - Parents of final stage: List()
20:33:28.451 INFO  DAGScheduler - Missing parents: List()
20:33:28.451 INFO  DAGScheduler - Submitting ResultStage 72 (MapPartitionsRDD[270] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:28.468 INFO  MemoryStore - Block broadcast_130 stored as values in memory (estimated size 426.1 KiB, free 1916.2 MiB)
20:33:28.470 INFO  MemoryStore - Block broadcast_130_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.0 MiB)
20:33:28.470 INFO  BlockManagerInfo - Added broadcast_130_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.3 MiB)
20:33:28.470 INFO  SparkContext - Created broadcast 130 from broadcast at DAGScheduler.scala:1580
20:33:28.470 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 72 (MapPartitionsRDD[270] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:28.470 INFO  TaskSchedulerImpl - Adding task set 72.0 with 1 tasks resource profile 0
20:33:28.471 INFO  TaskSetManager - Starting task 0.0 in stage 72.0 (TID 110) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:28.471 INFO  Executor - Running task 0.0 in stage 72.0 (TID 110)
20:33:28.501 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:28.511 INFO  Executor - Finished task 0.0 in stage 72.0 (TID 110). 989 bytes result sent to driver
20:33:28.511 INFO  TaskSetManager - Finished task 0.0 in stage 72.0 (TID 110) in 40 ms on localhost (executor driver) (1/1)
20:33:28.511 INFO  TaskSchedulerImpl - Removed TaskSet 72.0, whose tasks have all completed, from pool 
20:33:28.512 INFO  DAGScheduler - ResultStage 72 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.060 s
20:33:28.512 INFO  DAGScheduler - Job 54 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.512 INFO  TaskSchedulerImpl - Killing all running tasks in stage 72: Stage finished
20:33:28.512 INFO  DAGScheduler - Job 54 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061214 s
20:33:28.515 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:28.516 INFO  DAGScheduler - Got job 55 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:28.516 INFO  DAGScheduler - Final stage: ResultStage 73 (count at ReadsSparkSinkUnitTest.java:185)
20:33:28.516 INFO  DAGScheduler - Parents of final stage: List()
20:33:28.516 INFO  DAGScheduler - Missing parents: List()
20:33:28.516 INFO  DAGScheduler - Submitting ResultStage 73 (MapPartitionsRDD[288] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:28.517 INFO  MemoryStore - Block broadcast_131 stored as values in memory (estimated size 7.4 KiB, free 1916.0 MiB)
20:33:28.517 INFO  MemoryStore - Block broadcast_131_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1916.0 MiB)
20:33:28.517 INFO  BlockManagerInfo - Added broadcast_131_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.2 MiB)
20:33:28.518 INFO  SparkContext - Created broadcast 131 from broadcast at DAGScheduler.scala:1580
20:33:28.518 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 73 (MapPartitionsRDD[288] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:28.518 INFO  TaskSchedulerImpl - Adding task set 73.0 with 1 tasks resource profile 0
20:33:28.519 INFO  TaskSetManager - Starting task 0.0 in stage 73.0 (TID 111) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:28.519 INFO  Executor - Running task 0.0 in stage 73.0 (TID 111)
20:33:28.521 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam:0+847558
20:33:28.522 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_7f488b6f-e291-4246-ad19-87c1a207fe14.sam	dst=null	perm=null	proto=rpc
20:33:28.524 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:28.536 INFO  Executor - Finished task 0.0 in stage 73.0 (TID 111). 989 bytes result sent to driver
20:33:28.536 INFO  TaskSetManager - Finished task 0.0 in stage 73.0 (TID 111) in 18 ms on localhost (executor driver) (1/1)
20:33:28.536 INFO  TaskSchedulerImpl - Removed TaskSet 73.0, whose tasks have all completed, from pool 
20:33:28.537 INFO  DAGScheduler - ResultStage 73 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.021 s
20:33:28.537 INFO  DAGScheduler - Job 55 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.537 INFO  TaskSchedulerImpl - Killing all running tasks in stage 73: Stage finished
20:33:28.537 INFO  DAGScheduler - Job 55 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.021530 s
20:33:28.540 INFO  MemoryStore - Block broadcast_132 stored as values in memory (estimated size 297.9 KiB, free 1915.7 MiB)
20:33:28.546 INFO  MemoryStore - Block broadcast_132_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.7 MiB)
20:33:28.547 INFO  BlockManagerInfo - Added broadcast_132_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:28.547 INFO  SparkContext - Created broadcast 132 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.572 INFO  MemoryStore - Block broadcast_133 stored as values in memory (estimated size 297.9 KiB, free 1915.4 MiB)
20:33:28.582 INFO  BlockManagerInfo - Removed broadcast_123_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:28.583 INFO  BlockManagerInfo - Removed broadcast_127_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:28.583 INFO  BlockManagerInfo - Removed broadcast_117_piece0 on localhost:45281 in memory (size: 187.0 B, free: 1919.3 MiB)
20:33:28.584 INFO  BlockManagerInfo - Removed broadcast_128_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:28.585 INFO  BlockManagerInfo - Removed broadcast_129_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.3 MiB)
20:33:28.586 INFO  BlockManagerInfo - Removed broadcast_125_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:28.586 INFO  BlockManagerInfo - Removed broadcast_126_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.5 MiB)
20:33:28.587 INFO  BlockManagerInfo - Removed broadcast_121_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.6 MiB)
20:33:28.588 INFO  BlockManagerInfo - Removed broadcast_124_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:28.588 INFO  MemoryStore - Block broadcast_133_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:28.588 INFO  BlockManagerInfo - Added broadcast_133_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:28.589 INFO  SparkContext - Created broadcast 133 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.589 INFO  BlockManagerInfo - Removed broadcast_109_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.6 MiB)
20:33:28.590 INFO  BlockManagerInfo - Removed broadcast_118_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:28.591 INFO  BlockManagerInfo - Removed broadcast_122_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:28.591 INFO  BlockManagerInfo - Removed broadcast_131_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.7 MiB)
20:33:28.592 INFO  BlockManagerInfo - Removed broadcast_110_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:28.592 INFO  BlockManagerInfo - Removed broadcast_130_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.9 MiB)
20:33:28.620 INFO  MemoryStore - Block broadcast_134 stored as values in memory (estimated size 160.7 KiB, free 1919.2 MiB)
20:33:28.621 INFO  MemoryStore - Block broadcast_134_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.2 MiB)
20:33:28.621 INFO  BlockManagerInfo - Added broadcast_134_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:28.621 INFO  SparkContext - Created broadcast 134 from broadcast at ReadsSparkSink.java:133
20:33:28.623 INFO  MemoryStore - Block broadcast_135 stored as values in memory (estimated size 163.2 KiB, free 1919.0 MiB)
20:33:28.624 INFO  MemoryStore - Block broadcast_135_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.0 MiB)
20:33:28.625 INFO  BlockManagerInfo - Added broadcast_135_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:28.625 INFO  SparkContext - Created broadcast 135 from broadcast at AnySamSinkMultiple.java:80
20:33:28.630 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:28.630 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.630 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.644 INFO  FileInputFormat - Total input files to process : 1
20:33:28.657 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:28.658 INFO  DAGScheduler - Registering RDD 296 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 16
20:33:28.658 INFO  DAGScheduler - Got job 56 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:28.658 INFO  DAGScheduler - Final stage: ResultStage 75 (runJob at SparkHadoopWriter.scala:83)
20:33:28.658 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 74)
20:33:28.658 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 74)
20:33:28.658 INFO  DAGScheduler - Submitting ShuffleMapStage 74 (MapPartitionsRDD[296] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:28.676 INFO  MemoryStore - Block broadcast_136 stored as values in memory (estimated size 427.7 KiB, free 1918.6 MiB)
20:33:28.678 INFO  MemoryStore - Block broadcast_136_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1918.4 MiB)
20:33:28.678 INFO  BlockManagerInfo - Added broadcast_136_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.7 MiB)
20:33:28.678 INFO  SparkContext - Created broadcast 136 from broadcast at DAGScheduler.scala:1580
20:33:28.678 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 74 (MapPartitionsRDD[296] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:28.678 INFO  TaskSchedulerImpl - Adding task set 74.0 with 1 tasks resource profile 0
20:33:28.679 INFO  TaskSetManager - Starting task 0.0 in stage 74.0 (TID 112) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:28.679 INFO  Executor - Running task 0.0 in stage 74.0 (TID 112)
20:33:28.711 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:28.731 INFO  Executor - Finished task 0.0 in stage 74.0 (TID 112). 1149 bytes result sent to driver
20:33:28.732 INFO  TaskSetManager - Finished task 0.0 in stage 74.0 (TID 112) in 53 ms on localhost (executor driver) (1/1)
20:33:28.732 INFO  TaskSchedulerImpl - Removed TaskSet 74.0, whose tasks have all completed, from pool 
20:33:28.732 INFO  DAGScheduler - ShuffleMapStage 74 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.073 s
20:33:28.732 INFO  DAGScheduler - looking for newly runnable stages
20:33:28.732 INFO  DAGScheduler - running: HashSet()
20:33:28.732 INFO  DAGScheduler - waiting: HashSet(ResultStage 75)
20:33:28.732 INFO  DAGScheduler - failed: HashSet()
20:33:28.732 INFO  DAGScheduler - Submitting ResultStage 75 (MapPartitionsRDD[308] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:28.740 INFO  MemoryStore - Block broadcast_137 stored as values in memory (estimated size 150.2 KiB, free 1918.3 MiB)
20:33:28.741 INFO  MemoryStore - Block broadcast_137_piece0 stored as bytes in memory (estimated size 56.3 KiB, free 1918.2 MiB)
20:33:28.741 INFO  BlockManagerInfo - Added broadcast_137_piece0 in memory on localhost:45281 (size: 56.3 KiB, free: 1919.7 MiB)
20:33:28.741 INFO  SparkContext - Created broadcast 137 from broadcast at DAGScheduler.scala:1580
20:33:28.742 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 75 (MapPartitionsRDD[308] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:28.742 INFO  TaskSchedulerImpl - Adding task set 75.0 with 2 tasks resource profile 0
20:33:28.743 INFO  TaskSetManager - Starting task 0.0 in stage 75.0 (TID 113) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:28.743 INFO  TaskSetManager - Starting task 1.0 in stage 75.0 (TID 114) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:28.743 INFO  Executor - Running task 0.0 in stage 75.0 (TID 113)
20:33:28.744 INFO  Executor - Running task 1.0 in stage 75.0 (TID 114)
20:33:28.751 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:28.751 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:28.751 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.751 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.751 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.751 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.751 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:28.751 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.751 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.752 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:28.752 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:28.753 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:28.763 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:28.763 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:28.766 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:28.766 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:28.774 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033284273974037619170419_0308_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest112094805338769790597.bam/_temporary/0/task_202507152033284273974037619170419_0308_r_000001
20:33:28.775 INFO  SparkHadoopMapRedUtil - attempt_202507152033284273974037619170419_0308_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:28.775 INFO  Executor - Finished task 1.0 in stage 75.0 (TID 114). 1729 bytes result sent to driver
20:33:28.776 INFO  TaskSetManager - Finished task 1.0 in stage 75.0 (TID 114) in 33 ms on localhost (executor driver) (1/2)
20:33:28.776 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033284273974037619170419_0308_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest112094805338769790597.bam/_temporary/0/task_202507152033284273974037619170419_0308_r_000000
20:33:28.776 INFO  SparkHadoopMapRedUtil - attempt_202507152033284273974037619170419_0308_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:28.777 INFO  Executor - Finished task 0.0 in stage 75.0 (TID 113). 1729 bytes result sent to driver
20:33:28.777 INFO  TaskSetManager - Finished task 0.0 in stage 75.0 (TID 113) in 35 ms on localhost (executor driver) (2/2)
20:33:28.777 INFO  TaskSchedulerImpl - Removed TaskSet 75.0, whose tasks have all completed, from pool 
20:33:28.777 INFO  DAGScheduler - ResultStage 75 (runJob at SparkHadoopWriter.scala:83) finished in 0.044 s
20:33:28.777 INFO  DAGScheduler - Job 56 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.777 INFO  TaskSchedulerImpl - Killing all running tasks in stage 75: Stage finished
20:33:28.777 INFO  DAGScheduler - Job 56 finished: runJob at SparkHadoopWriter.scala:83, took 0.120411 s
20:33:28.778 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033284273974037619170419_0308.
20:33:28.784 INFO  SparkHadoopWriter - Write Job job_202507152033284273974037619170419_0308 committed. Elapsed time: 5 ms.
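Job 56 exercises the sharded path (AnySamSinkMultiple): the reads are repartitioned into two partitions (repartition at ReadsSparkSinkUnitTest.java:210), and each partition is written as its own part-r-0000N.bam under the output directory, with no concatenation step afterwards. An illustrative sketch of the repartition step under those assumptions; the data and shard count here are hypothetical.

```java
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class ShardedRepartitionSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("sharded-repartition-sketch");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            JavaRDD<String> reads = jsc.parallelize(Arrays.asList("r1", "r2", "r3", "r4"), 1);
            // Repartitioning forces a shuffle like the one in ShuffleMapStage 74; each resulting
            // partition is then written out as one part-r-NNNNN file under the output directory.
            JavaRDD<String> sharded = reads.repartition(2);
            System.out.println("shards: " + sharded.getNumPartitions());
        }
    }
}
```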
20:33:28.788 INFO  MemoryStore - Block broadcast_138 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:28.799 INFO  MemoryStore - Block broadcast_138_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:28.799 INFO  BlockManagerInfo - Added broadcast_138_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:28.799 INFO  SparkContext - Created broadcast 138 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.822 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:28.823 INFO  DAGScheduler - Got job 57 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:28.823 INFO  DAGScheduler - Final stage: ResultStage 77 (count at ReadsSparkSinkUnitTest.java:222)
20:33:28.823 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 76)
20:33:28.823 INFO  DAGScheduler - Missing parents: List()
20:33:28.823 INFO  DAGScheduler - Submitting ResultStage 77 (MapPartitionsRDD[299] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:28.824 INFO  MemoryStore - Block broadcast_139 stored as values in memory (estimated size 6.3 KiB, free 1917.9 MiB)
20:33:28.824 INFO  MemoryStore - Block broadcast_139_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1917.9 MiB)
20:33:28.825 INFO  BlockManagerInfo - Added broadcast_139_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.6 MiB)
20:33:28.825 INFO  SparkContext - Created broadcast 139 from broadcast at DAGScheduler.scala:1580
20:33:28.825 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 77 (MapPartitionsRDD[299] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:28.825 INFO  TaskSchedulerImpl - Adding task set 77.0 with 2 tasks resource profile 0
20:33:28.826 INFO  TaskSetManager - Starting task 0.0 in stage 77.0 (TID 115) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:28.826 INFO  TaskSetManager - Starting task 1.0 in stage 77.0 (TID 116) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:28.826 INFO  Executor - Running task 0.0 in stage 77.0 (TID 115)
20:33:28.826 INFO  Executor - Running task 1.0 in stage 77.0 (TID 116)
20:33:28.828 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:28.828 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:28.828 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:28.828 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:28.833 INFO  Executor - Finished task 0.0 in stage 77.0 (TID 115). 1634 bytes result sent to driver
20:33:28.833 INFO  TaskSetManager - Finished task 0.0 in stage 77.0 (TID 115) in 7 ms on localhost (executor driver) (1/2)
20:33:28.833 INFO  Executor - Finished task 1.0 in stage 77.0 (TID 116). 1634 bytes result sent to driver
20:33:28.834 INFO  TaskSetManager - Finished task 1.0 in stage 77.0 (TID 116) in 8 ms on localhost (executor driver) (2/2)
20:33:28.834 INFO  TaskSchedulerImpl - Removed TaskSet 77.0, whose tasks have all completed, from pool 
20:33:28.834 INFO  DAGScheduler - ResultStage 77 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.011 s
20:33:28.834 INFO  DAGScheduler - Job 57 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.834 INFO  TaskSchedulerImpl - Killing all running tasks in stage 77: Stage finished
20:33:28.834 INFO  DAGScheduler - Job 57 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.012059 s
20:33:28.849 INFO  FileInputFormat - Total input files to process : 2
20:33:28.853 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:28.853 INFO  DAGScheduler - Got job 58 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:28.853 INFO  DAGScheduler - Final stage: ResultStage 78 (count at ReadsSparkSinkUnitTest.java:222)
20:33:28.853 INFO  DAGScheduler - Parents of final stage: List()
20:33:28.853 INFO  DAGScheduler - Missing parents: List()
20:33:28.853 INFO  DAGScheduler - Submitting ResultStage 78 (MapPartitionsRDD[315] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:28.871 INFO  MemoryStore - Block broadcast_140 stored as values in memory (estimated size 426.1 KiB, free 1917.4 MiB)
20:33:28.872 INFO  MemoryStore - Block broadcast_140_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.3 MiB)
20:33:28.872 INFO  BlockManagerInfo - Added broadcast_140_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:28.872 INFO  SparkContext - Created broadcast 140 from broadcast at DAGScheduler.scala:1580
20:33:28.873 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 78 (MapPartitionsRDD[315] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:28.873 INFO  TaskSchedulerImpl - Adding task set 78.0 with 2 tasks resource profile 0
20:33:28.873 INFO  TaskSetManager - Starting task 0.0 in stage 78.0 (TID 117) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7827 bytes) 
20:33:28.873 INFO  TaskSetManager - Starting task 1.0 in stage 78.0 (TID 118) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7827 bytes) 
20:33:28.874 INFO  Executor - Running task 1.0 in stage 78.0 (TID 118)
20:33:28.874 INFO  Executor - Running task 0.0 in stage 78.0 (TID 117)
20:33:28.921 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest112094805338769790597.bam/part-r-00000.bam:0+132492
20:33:28.921 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest112094805338769790597.bam/part-r-00001.bam:0+129330
20:33:28.935 INFO  Executor - Finished task 1.0 in stage 78.0 (TID 118). 989 bytes result sent to driver
20:33:28.935 INFO  Executor - Finished task 0.0 in stage 78.0 (TID 117). 989 bytes result sent to driver
20:33:28.937 INFO  TaskSetManager - Finished task 0.0 in stage 78.0 (TID 117) in 64 ms on localhost (executor driver) (1/2)
20:33:28.937 INFO  TaskSetManager - Finished task 1.0 in stage 78.0 (TID 118) in 64 ms on localhost (executor driver) (2/2)
20:33:28.937 INFO  TaskSchedulerImpl - Removed TaskSet 78.0, whose tasks have all completed, from pool 
20:33:28.938 INFO  DAGScheduler - ResultStage 78 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.084 s
20:33:28.938 INFO  DAGScheduler - Job 58 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:28.938 INFO  TaskSchedulerImpl - Killing all running tasks in stage 78: Stage finished
20:33:28.938 INFO  DAGScheduler - Job 58 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.085322 s
20:33:28.942 INFO  MemoryStore - Block broadcast_141 stored as values in memory (estimated size 297.9 KiB, free 1917.0 MiB)
20:33:28.953 INFO  MemoryStore - Block broadcast_141_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.0 MiB)
20:33:28.953 INFO  BlockManagerInfo - Added broadcast_141_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:28.953 INFO  SparkContext - Created broadcast 141 from newAPIHadoopFile at PathSplitSource.java:96
20:33:28.983 INFO  MemoryStore - Block broadcast_142 stored as values in memory (estimated size 297.9 KiB, free 1916.7 MiB)
20:33:28.989 INFO  MemoryStore - Block broadcast_142_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:28.990 INFO  BlockManagerInfo - Added broadcast_142_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:28.990 INFO  SparkContext - Created broadcast 142 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.010 INFO  MemoryStore - Block broadcast_143 stored as values in memory (estimated size 160.7 KiB, free 1916.5 MiB)
20:33:29.011 INFO  MemoryStore - Block broadcast_143_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.5 MiB)
20:33:29.011 INFO  BlockManagerInfo - Added broadcast_143_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.012 INFO  SparkContext - Created broadcast 143 from broadcast at ReadsSparkSink.java:133
20:33:29.013 INFO  MemoryStore - Block broadcast_144 stored as values in memory (estimated size 163.2 KiB, free 1916.3 MiB)
20:33:29.014 INFO  MemoryStore - Block broadcast_144_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.3 MiB)
20:33:29.014 INFO  BlockManagerInfo - Added broadcast_144_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.014 INFO  SparkContext - Created broadcast 144 from broadcast at AnySamSinkMultiple.java:80
20:33:29.016 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.016 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.016 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.028 INFO  FileInputFormat - Total input files to process : 1
20:33:29.035 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:29.035 INFO  DAGScheduler - Registering RDD 323 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 17
20:33:29.035 INFO  DAGScheduler - Got job 59 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:29.035 INFO  DAGScheduler - Final stage: ResultStage 80 (runJob at SparkHadoopWriter.scala:83)
20:33:29.035 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 79)
20:33:29.036 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 79)
20:33:29.036 INFO  DAGScheduler - Submitting ShuffleMapStage 79 (MapPartitionsRDD[323] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.053 INFO  MemoryStore - Block broadcast_145 stored as values in memory (estimated size 427.7 KiB, free 1915.9 MiB)
20:33:29.063 INFO  MemoryStore - Block broadcast_145_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1915.7 MiB)
20:33:29.063 INFO  BlockManagerInfo - Added broadcast_145_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.2 MiB)
20:33:29.063 INFO  BlockManagerInfo - Removed broadcast_140_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:29.064 INFO  SparkContext - Created broadcast 145 from broadcast at DAGScheduler.scala:1580
20:33:29.064 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 79 (MapPartitionsRDD[323] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:29.064 INFO  TaskSchedulerImpl - Adding task set 79.0 with 1 tasks resource profile 0
20:33:29.064 INFO  BlockManagerInfo - Removed broadcast_135_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.065 INFO  TaskSetManager - Starting task 0.0 in stage 79.0 (TID 119) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:29.065 INFO  BlockManagerInfo - Removed broadcast_134_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.065 INFO  Executor - Running task 0.0 in stage 79.0 (TID 119)
20:33:29.066 INFO  BlockManagerInfo - Removed broadcast_132_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:29.066 INFO  BlockManagerInfo - Removed broadcast_136_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.6 MiB)
20:33:29.067 INFO  BlockManagerInfo - Removed broadcast_137_piece0 on localhost:45281 in memory (size: 56.3 KiB, free: 1919.6 MiB)
20:33:29.067 INFO  BlockManagerInfo - Removed broadcast_138_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.068 INFO  BlockManagerInfo - Removed broadcast_142_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.069 INFO  BlockManagerInfo - Removed broadcast_133_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:29.069 INFO  BlockManagerInfo - Removed broadcast_139_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.8 MiB)
20:33:29.099 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:29.118 INFO  Executor - Finished task 0.0 in stage 79.0 (TID 119). 1149 bytes result sent to driver
20:33:29.119 INFO  TaskSetManager - Finished task 0.0 in stage 79.0 (TID 119) in 54 ms on localhost (executor driver) (1/1)
20:33:29.119 INFO  TaskSchedulerImpl - Removed TaskSet 79.0, whose tasks have all completed, from pool 
20:33:29.119 INFO  DAGScheduler - ShuffleMapStage 79 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.083 s
20:33:29.119 INFO  DAGScheduler - looking for newly runnable stages
20:33:29.119 INFO  DAGScheduler - running: HashSet()
20:33:29.119 INFO  DAGScheduler - waiting: HashSet(ResultStage 80)
20:33:29.119 INFO  DAGScheduler - failed: HashSet()
20:33:29.119 INFO  DAGScheduler - Submitting ResultStage 80 (MapPartitionsRDD[335] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:29.126 INFO  MemoryStore - Block broadcast_146 stored as values in memory (estimated size 150.2 KiB, free 1918.6 MiB)
20:33:29.127 INFO  MemoryStore - Block broadcast_146_piece0 stored as bytes in memory (estimated size 56.2 KiB, free 1918.6 MiB)
20:33:29.127 INFO  BlockManagerInfo - Added broadcast_146_piece0 in memory on localhost:45281 (size: 56.2 KiB, free: 1919.7 MiB)
20:33:29.127 INFO  SparkContext - Created broadcast 146 from broadcast at DAGScheduler.scala:1580
20:33:29.127 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 80 (MapPartitionsRDD[335] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.128 INFO  TaskSchedulerImpl - Adding task set 80.0 with 2 tasks resource profile 0
20:33:29.128 INFO  TaskSetManager - Starting task 0.0 in stage 80.0 (TID 120) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.128 INFO  TaskSetManager - Starting task 1.0 in stage 80.0 (TID 121) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.129 INFO  Executor - Running task 1.0 in stage 80.0 (TID 121)
20:33:29.129 INFO  Executor - Running task 0.0 in stage 80.0 (TID 120)
20:33:29.133 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.133 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.133 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.134 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.134 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.134 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.135 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.135 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.135 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.135 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.135 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.135 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.145 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.145 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.148 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.148 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.155 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033298670733518024237419_0335_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest11753924759019857658.bam/_temporary/0/task_202507152033298670733518024237419_0335_r_000000
20:33:29.155 INFO  SparkHadoopMapRedUtil - attempt_202507152033298670733518024237419_0335_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:29.156 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033298670733518024237419_0335_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest11753924759019857658.bam/_temporary/0/task_202507152033298670733518024237419_0335_r_000001
20:33:29.156 INFO  SparkHadoopMapRedUtil - attempt_202507152033298670733518024237419_0335_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:29.156 INFO  Executor - Finished task 0.0 in stage 80.0 (TID 120). 1729 bytes result sent to driver
20:33:29.156 INFO  Executor - Finished task 1.0 in stage 80.0 (TID 121). 1729 bytes result sent to driver
20:33:29.157 INFO  TaskSetManager - Finished task 0.0 in stage 80.0 (TID 120) in 29 ms on localhost (executor driver) (1/2)
20:33:29.157 INFO  TaskSetManager - Finished task 1.0 in stage 80.0 (TID 121) in 29 ms on localhost (executor driver) (2/2)
20:33:29.157 INFO  TaskSchedulerImpl - Removed TaskSet 80.0, whose tasks have all completed, from pool 
20:33:29.157 INFO  DAGScheduler - ResultStage 80 (runJob at SparkHadoopWriter.scala:83) finished in 0.037 s
20:33:29.157 INFO  DAGScheduler - Job 59 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.157 INFO  TaskSchedulerImpl - Killing all running tasks in stage 80: Stage finished
20:33:29.157 INFO  DAGScheduler - Job 59 finished: runJob at SparkHadoopWriter.scala:83, took 0.122450 s
20:33:29.158 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033298670733518024237419_0335.
20:33:29.163 INFO  SparkHadoopWriter - Write Job job_202507152033298670733518024237419_0335 committed. Elapsed time: 5 ms.
20:33:29.166 INFO  MemoryStore - Block broadcast_147 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:29.177 INFO  MemoryStore - Block broadcast_147_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:29.177 INFO  BlockManagerInfo - Added broadcast_147_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.177 INFO  SparkContext - Created broadcast 147 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.209 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.210 INFO  DAGScheduler - Got job 60 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.210 INFO  DAGScheduler - Final stage: ResultStage 82 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.210 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 81)
20:33:29.210 INFO  DAGScheduler - Missing parents: List()
20:33:29.210 INFO  DAGScheduler - Submitting ResultStage 82 (MapPartitionsRDD[326] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.211 INFO  MemoryStore - Block broadcast_148 stored as values in memory (estimated size 6.3 KiB, free 1918.2 MiB)
20:33:29.212 INFO  MemoryStore - Block broadcast_148_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1918.2 MiB)
20:33:29.212 INFO  BlockManagerInfo - Added broadcast_148_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.7 MiB)
20:33:29.212 INFO  SparkContext - Created broadcast 148 from broadcast at DAGScheduler.scala:1580
20:33:29.212 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 82 (MapPartitionsRDD[326] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.212 INFO  TaskSchedulerImpl - Adding task set 82.0 with 2 tasks resource profile 0
20:33:29.213 INFO  TaskSetManager - Starting task 0.0 in stage 82.0 (TID 122) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.213 INFO  TaskSetManager - Starting task 1.0 in stage 82.0 (TID 123) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.214 INFO  Executor - Running task 1.0 in stage 82.0 (TID 123)
20:33:29.214 INFO  Executor - Running task 0.0 in stage 82.0 (TID 122)
20:33:29.216 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.216 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.216 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.216 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.220 INFO  Executor - Finished task 1.0 in stage 82.0 (TID 123). 1634 bytes result sent to driver
20:33:29.220 INFO  TaskSetManager - Finished task 1.0 in stage 82.0 (TID 123) in 7 ms on localhost (executor driver) (1/2)
20:33:29.220 INFO  Executor - Finished task 0.0 in stage 82.0 (TID 122). 1634 bytes result sent to driver
20:33:29.221 INFO  TaskSetManager - Finished task 0.0 in stage 82.0 (TID 122) in 8 ms on localhost (executor driver) (2/2)
20:33:29.221 INFO  TaskSchedulerImpl - Removed TaskSet 82.0, whose tasks have all completed, from pool 
20:33:29.221 INFO  DAGScheduler - ResultStage 82 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.010 s
20:33:29.221 INFO  DAGScheduler - Job 60 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.221 INFO  TaskSchedulerImpl - Killing all running tasks in stage 82: Stage finished
20:33:29.221 INFO  DAGScheduler - Job 60 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.011811 s
20:33:29.235 INFO  FileInputFormat - Total input files to process : 2
20:33:29.238 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.239 INFO  DAGScheduler - Got job 61 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.239 INFO  DAGScheduler - Final stage: ResultStage 83 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.239 INFO  DAGScheduler - Parents of final stage: List()
20:33:29.239 INFO  DAGScheduler - Missing parents: List()
20:33:29.239 INFO  DAGScheduler - Submitting ResultStage 83 (MapPartitionsRDD[342] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:29.269 INFO  MemoryStore - Block broadcast_149 stored as values in memory (estimated size 426.1 KiB, free 1917.8 MiB)
20:33:29.271 INFO  MemoryStore - Block broadcast_149_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.6 MiB)
20:33:29.271 INFO  BlockManagerInfo - Added broadcast_149_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:29.271 INFO  SparkContext - Created broadcast 149 from broadcast at DAGScheduler.scala:1580
20:33:29.271 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 83 (MapPartitionsRDD[342] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.272 INFO  TaskSchedulerImpl - Adding task set 83.0 with 2 tasks resource profile 0
20:33:29.272 INFO  TaskSetManager - Starting task 0.0 in stage 83.0 (TID 124) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7826 bytes) 
20:33:29.272 INFO  TaskSetManager - Starting task 1.0 in stage 83.0 (TID 125) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7826 bytes) 
20:33:29.273 INFO  Executor - Running task 1.0 in stage 83.0 (TID 125)
20:33:29.273 INFO  Executor - Running task 0.0 in stage 83.0 (TID 124)
20:33:29.312 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11753924759019857658.bam/part-r-00001.bam:0+129330
20:33:29.320 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11753924759019857658.bam/part-r-00000.bam:0+132492
20:33:29.321 INFO  Executor - Finished task 0.0 in stage 83.0 (TID 124). 989 bytes result sent to driver
20:33:29.322 INFO  TaskSetManager - Finished task 0.0 in stage 83.0 (TID 124) in 49 ms on localhost (executor driver) (1/2)
20:33:29.333 INFO  Executor - Finished task 1.0 in stage 83.0 (TID 125). 989 bytes result sent to driver
20:33:29.333 INFO  TaskSetManager - Finished task 1.0 in stage 83.0 (TID 125) in 61 ms on localhost (executor driver) (2/2)
20:33:29.333 INFO  TaskSchedulerImpl - Removed TaskSet 83.0, whose tasks have all completed, from pool 
20:33:29.333 INFO  DAGScheduler - ResultStage 83 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.094 s
20:33:29.334 INFO  DAGScheduler - Job 61 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.334 INFO  TaskSchedulerImpl - Killing all running tasks in stage 83: Stage finished
20:33:29.334 INFO  DAGScheduler - Job 61 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.095333 s
20:33:29.337 INFO  MemoryStore - Block broadcast_150 stored as values in memory (estimated size 297.9 KiB, free 1917.3 MiB)
20:33:29.343 INFO  MemoryStore - Block broadcast_150_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.3 MiB)
20:33:29.343 INFO  BlockManagerInfo - Added broadcast_150_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:29.344 INFO  SparkContext - Created broadcast 150 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.368 INFO  MemoryStore - Block broadcast_151 stored as values in memory (estimated size 297.9 KiB, free 1917.0 MiB)
20:33:29.374 INFO  MemoryStore - Block broadcast_151_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.0 MiB)
20:33:29.375 INFO  BlockManagerInfo - Added broadcast_151_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:29.375 INFO  SparkContext - Created broadcast 151 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.395 INFO  MemoryStore - Block broadcast_152 stored as values in memory (estimated size 160.7 KiB, free 1916.8 MiB)
20:33:29.396 INFO  MemoryStore - Block broadcast_152_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.8 MiB)
20:33:29.396 INFO  BlockManagerInfo - Added broadcast_152_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.396 INFO  SparkContext - Created broadcast 152 from broadcast at ReadsSparkSink.java:133
20:33:29.398 INFO  MemoryStore - Block broadcast_153 stored as values in memory (estimated size 163.2 KiB, free 1916.6 MiB)
20:33:29.399 INFO  MemoryStore - Block broadcast_153_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.6 MiB)
20:33:29.399 INFO  BlockManagerInfo - Added broadcast_153_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:29.399 INFO  SparkContext - Created broadcast 153 from broadcast at AnySamSinkMultiple.java:80
20:33:29.401 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.401 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.401 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.413 INFO  FileInputFormat - Total input files to process : 1
20:33:29.420 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:29.420 INFO  DAGScheduler - Registering RDD 350 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 18
20:33:29.420 INFO  DAGScheduler - Got job 62 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:29.420 INFO  DAGScheduler - Final stage: ResultStage 85 (runJob at SparkHadoopWriter.scala:83)
20:33:29.420 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 84)
20:33:29.420 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 84)
20:33:29.421 INFO  DAGScheduler - Submitting ShuffleMapStage 84 (MapPartitionsRDD[350] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.438 INFO  MemoryStore - Block broadcast_154 stored as values in memory (estimated size 427.7 KiB, free 1916.2 MiB)
20:33:29.440 INFO  MemoryStore - Block broadcast_154_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1916.1 MiB)
20:33:29.440 INFO  BlockManagerInfo - Added broadcast_154_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.3 MiB)
20:33:29.440 INFO  SparkContext - Created broadcast 154 from broadcast at DAGScheduler.scala:1580
20:33:29.440 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 84 (MapPartitionsRDD[350] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:29.440 INFO  TaskSchedulerImpl - Adding task set 84.0 with 1 tasks resource profile 0
20:33:29.441 INFO  TaskSetManager - Starting task 0.0 in stage 84.0 (TID 126) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:29.441 INFO  Executor - Running task 0.0 in stage 84.0 (TID 126)
20:33:29.477 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:29.494 INFO  Executor - Finished task 0.0 in stage 84.0 (TID 126). 1149 bytes result sent to driver
20:33:29.495 INFO  TaskSetManager - Finished task 0.0 in stage 84.0 (TID 126) in 54 ms on localhost (executor driver) (1/1)
20:33:29.495 INFO  TaskSchedulerImpl - Removed TaskSet 84.0, whose tasks have all completed, from pool 
20:33:29.495 INFO  DAGScheduler - ShuffleMapStage 84 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.074 s
20:33:29.495 INFO  DAGScheduler - looking for newly runnable stages
20:33:29.495 INFO  DAGScheduler - running: HashSet()
20:33:29.495 INFO  DAGScheduler - waiting: HashSet(ResultStage 85)
20:33:29.495 INFO  DAGScheduler - failed: HashSet()
20:33:29.495 INFO  DAGScheduler - Submitting ResultStage 85 (MapPartitionsRDD[362] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:29.506 INFO  MemoryStore - Block broadcast_155 stored as values in memory (estimated size 150.2 KiB, free 1915.9 MiB)
20:33:29.507 INFO  MemoryStore - Block broadcast_155_piece0 stored as bytes in memory (estimated size 56.2 KiB, free 1915.9 MiB)
20:33:29.507 INFO  BlockManagerInfo - Added broadcast_155_piece0 in memory on localhost:45281 (size: 56.2 KiB, free: 1919.2 MiB)
20:33:29.507 INFO  SparkContext - Created broadcast 155 from broadcast at DAGScheduler.scala:1580
20:33:29.507 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 85 (MapPartitionsRDD[362] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.507 INFO  TaskSchedulerImpl - Adding task set 85.0 with 2 tasks resource profile 0
20:33:29.508 INFO  TaskSetManager - Starting task 0.0 in stage 85.0 (TID 127) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.508 INFO  TaskSetManager - Starting task 1.0 in stage 85.0 (TID 128) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.509 INFO  Executor - Running task 0.0 in stage 85.0 (TID 127)
20:33:29.509 INFO  Executor - Running task 1.0 in stage 85.0 (TID 128)
20:33:29.515 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.515 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.515 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.515 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.515 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.515 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.515 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.516 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.516 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.516 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.516 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.516 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.526 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.526 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.531 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.531 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.535 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033298222452143632574110_0362_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest11467479994144900249.bam/_temporary/0/task_202507152033298222452143632574110_0362_r_000000
20:33:29.535 INFO  SparkHadoopMapRedUtil - attempt_202507152033298222452143632574110_0362_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:29.536 INFO  Executor - Finished task 0.0 in stage 85.0 (TID 127). 1729 bytes result sent to driver
20:33:29.537 INFO  TaskSetManager - Finished task 0.0 in stage 85.0 (TID 127) in 29 ms on localhost (executor driver) (1/2)
20:33:29.539 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033298222452143632574110_0362_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest11467479994144900249.bam/_temporary/0/task_202507152033298222452143632574110_0362_r_000001
20:33:29.539 INFO  SparkHadoopMapRedUtil - attempt_202507152033298222452143632574110_0362_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:29.540 INFO  Executor - Finished task 1.0 in stage 85.0 (TID 128). 1729 bytes result sent to driver
20:33:29.540 INFO  TaskSetManager - Finished task 1.0 in stage 85.0 (TID 128) in 32 ms on localhost (executor driver) (2/2)
20:33:29.540 INFO  TaskSchedulerImpl - Removed TaskSet 85.0, whose tasks have all completed, from pool 
20:33:29.540 INFO  DAGScheduler - ResultStage 85 (runJob at SparkHadoopWriter.scala:83) finished in 0.044 s
20:33:29.540 INFO  DAGScheduler - Job 62 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.540 INFO  TaskSchedulerImpl - Killing all running tasks in stage 85: Stage finished
20:33:29.540 INFO  DAGScheduler - Job 62 finished: runJob at SparkHadoopWriter.scala:83, took 0.120909 s
20:33:29.541 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033298222452143632574110_0362.
20:33:29.546 INFO  SparkHadoopWriter - Write Job job_202507152033298222452143632574110_0362 committed. Elapsed time: 4 ms.
20:33:29.548 INFO  MemoryStore - Block broadcast_156 stored as values in memory (estimated size 297.9 KiB, free 1915.6 MiB)
20:33:29.558 INFO  BlockManagerInfo - Removed broadcast_141_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:29.559 INFO  BlockManagerInfo - Removed broadcast_149_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:29.559 INFO  BlockManagerInfo - Removed broadcast_154_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.6 MiB)
20:33:29.560 INFO  BlockManagerInfo - Removed broadcast_144_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:29.560 INFO  BlockManagerInfo - Removed broadcast_148_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.6 MiB)
20:33:29.561 INFO  BlockManagerInfo - Removed broadcast_151_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:29.562 INFO  BlockManagerInfo - Removed broadcast_147_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.563 INFO  BlockManagerInfo - Removed broadcast_143_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:29.563 INFO  BlockManagerInfo - Removed broadcast_146_piece0 on localhost:45281 in memory (size: 56.2 KiB, free: 1919.7 MiB)
20:33:29.564 INFO  MemoryStore - Block broadcast_156_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:29.564 INFO  BlockManagerInfo - Added broadcast_156_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.564 INFO  BlockManagerInfo - Removed broadcast_155_piece0 on localhost:45281 in memory (size: 56.2 KiB, free: 1919.7 MiB)
20:33:29.564 INFO  SparkContext - Created broadcast 156 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.565 INFO  BlockManagerInfo - Removed broadcast_145_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.9 MiB)
20:33:29.565 INFO  BlockManagerInfo - Removed broadcast_153_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.9 MiB)
20:33:29.566 INFO  BlockManagerInfo - Removed broadcast_152_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.9 MiB)
20:33:29.588 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.588 INFO  DAGScheduler - Got job 63 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.588 INFO  DAGScheduler - Final stage: ResultStage 87 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.588 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 86)
20:33:29.588 INFO  DAGScheduler - Missing parents: List()
20:33:29.589 INFO  DAGScheduler - Submitting ResultStage 87 (MapPartitionsRDD[353] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.589 INFO  MemoryStore - Block broadcast_157 stored as values in memory (estimated size 6.3 KiB, free 1919.3 MiB)
20:33:29.590 INFO  MemoryStore - Block broadcast_157_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1919.3 MiB)
20:33:29.590 INFO  BlockManagerInfo - Added broadcast_157_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.9 MiB)
20:33:29.590 INFO  SparkContext - Created broadcast 157 from broadcast at DAGScheduler.scala:1580
20:33:29.590 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 87 (MapPartitionsRDD[353] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.591 INFO  TaskSchedulerImpl - Adding task set 87.0 with 2 tasks resource profile 0
20:33:29.591 INFO  TaskSetManager - Starting task 0.0 in stage 87.0 (TID 129) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.592 INFO  TaskSetManager - Starting task 1.0 in stage 87.0 (TID 130) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.592 INFO  Executor - Running task 1.0 in stage 87.0 (TID 130)
20:33:29.592 INFO  Executor - Running task 0.0 in stage 87.0 (TID 129)
20:33:29.594 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.594 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.594 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.594 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.598 INFO  Executor - Finished task 0.0 in stage 87.0 (TID 129). 1634 bytes result sent to driver
20:33:29.598 INFO  TaskSetManager - Finished task 0.0 in stage 87.0 (TID 129) in 7 ms on localhost (executor driver) (1/2)
20:33:29.599 INFO  Executor - Finished task 1.0 in stage 87.0 (TID 130). 1634 bytes result sent to driver
20:33:29.599 INFO  TaskSetManager - Finished task 1.0 in stage 87.0 (TID 130) in 7 ms on localhost (executor driver) (2/2)
20:33:29.599 INFO  TaskSchedulerImpl - Removed TaskSet 87.0, whose tasks have all completed, from pool 
20:33:29.599 INFO  DAGScheduler - ResultStage 87 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.010 s
20:33:29.599 INFO  DAGScheduler - Job 63 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.599 INFO  TaskSchedulerImpl - Killing all running tasks in stage 87: Stage finished
20:33:29.599 INFO  DAGScheduler - Job 63 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.011407 s
20:33:29.613 INFO  FileInputFormat - Total input files to process : 2
20:33:29.617 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.617 INFO  DAGScheduler - Got job 64 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.617 INFO  DAGScheduler - Final stage: ResultStage 88 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.617 INFO  DAGScheduler - Parents of final stage: List()
20:33:29.617 INFO  DAGScheduler - Missing parents: List()
20:33:29.617 INFO  DAGScheduler - Submitting ResultStage 88 (MapPartitionsRDD[369] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:29.634 INFO  MemoryStore - Block broadcast_158 stored as values in memory (estimated size 426.1 KiB, free 1918.9 MiB)
20:33:29.636 INFO  MemoryStore - Block broadcast_158_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.7 MiB)
20:33:29.636 INFO  BlockManagerInfo - Added broadcast_158_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:29.636 INFO  SparkContext - Created broadcast 158 from broadcast at DAGScheduler.scala:1580
20:33:29.636 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 88 (MapPartitionsRDD[369] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.636 INFO  TaskSchedulerImpl - Adding task set 88.0 with 2 tasks resource profile 0
20:33:29.637 INFO  TaskSetManager - Starting task 0.0 in stage 88.0 (TID 131) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7826 bytes) 
20:33:29.637 INFO  TaskSetManager - Starting task 1.0 in stage 88.0 (TID 132) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7826 bytes) 
20:33:29.637 INFO  Executor - Running task 1.0 in stage 88.0 (TID 132)
20:33:29.637 INFO  Executor - Running task 0.0 in stage 88.0 (TID 131)
20:33:29.685 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11467479994144900249.bam/part-r-00000.bam:0+132492
20:33:29.685 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11467479994144900249.bam/part-r-00001.bam:0+129330
20:33:29.698 INFO  Executor - Finished task 0.0 in stage 88.0 (TID 131). 989 bytes result sent to driver
20:33:29.698 INFO  Executor - Finished task 1.0 in stage 88.0 (TID 132). 989 bytes result sent to driver
20:33:29.698 INFO  TaskSetManager - Finished task 1.0 in stage 88.0 (TID 132) in 61 ms on localhost (executor driver) (1/2)
20:33:29.699 INFO  TaskSetManager - Finished task 0.0 in stage 88.0 (TID 131) in 61 ms on localhost (executor driver) (2/2)
20:33:29.699 INFO  TaskSchedulerImpl - Removed TaskSet 88.0, whose tasks have all completed, from pool 
20:33:29.699 INFO  DAGScheduler - ResultStage 88 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.081 s
20:33:29.699 INFO  DAGScheduler - Job 64 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.699 INFO  TaskSchedulerImpl - Killing all running tasks in stage 88: Stage finished
20:33:29.699 INFO  DAGScheduler - Job 64 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.082694 s
20:33:29.702 INFO  MemoryStore - Block broadcast_159 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:29.709 INFO  MemoryStore - Block broadcast_159_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:29.709 INFO  BlockManagerInfo - Added broadcast_159_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.709 INFO  SparkContext - Created broadcast 159 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.732 INFO  MemoryStore - Block broadcast_160 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:29.738 INFO  MemoryStore - Block broadcast_160_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.1 MiB)
20:33:29.738 INFO  BlockManagerInfo - Added broadcast_160_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:29.739 INFO  SparkContext - Created broadcast 160 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.759 INFO  MemoryStore - Block broadcast_161 stored as values in memory (estimated size 160.7 KiB, free 1917.9 MiB)
20:33:29.760 INFO  MemoryStore - Block broadcast_161_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.9 MiB)
20:33:29.760 INFO  BlockManagerInfo - Added broadcast_161_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:29.760 INFO  SparkContext - Created broadcast 161 from broadcast at ReadsSparkSink.java:133
20:33:29.762 INFO  MemoryStore - Block broadcast_162 stored as values in memory (estimated size 163.2 KiB, free 1917.7 MiB)
20:33:29.763 INFO  MemoryStore - Block broadcast_162_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:29.763 INFO  BlockManagerInfo - Added broadcast_162_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:29.763 INFO  SparkContext - Created broadcast 162 from broadcast at AnySamSinkMultiple.java:80
20:33:29.765 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.765 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.765 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.777 INFO  FileInputFormat - Total input files to process : 1
20:33:29.784 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:29.785 INFO  DAGScheduler - Registering RDD 377 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 19
20:33:29.785 INFO  DAGScheduler - Got job 65 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:29.785 INFO  DAGScheduler - Final stage: ResultStage 90 (runJob at SparkHadoopWriter.scala:83)
20:33:29.785 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 89)
20:33:29.785 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 89)
20:33:29.785 INFO  DAGScheduler - Submitting ShuffleMapStage 89 (MapPartitionsRDD[377] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.802 INFO  MemoryStore - Block broadcast_163 stored as values in memory (estimated size 427.7 KiB, free 1917.3 MiB)
20:33:29.803 INFO  MemoryStore - Block broadcast_163_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1917.2 MiB)
20:33:29.804 INFO  BlockManagerInfo - Added broadcast_163_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.5 MiB)
20:33:29.804 INFO  SparkContext - Created broadcast 163 from broadcast at DAGScheduler.scala:1580
20:33:29.804 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 89 (MapPartitionsRDD[377] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:29.804 INFO  TaskSchedulerImpl - Adding task set 89.0 with 1 tasks resource profile 0
20:33:29.805 INFO  TaskSetManager - Starting task 0.0 in stage 89.0 (TID 133) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:29.805 INFO  Executor - Running task 0.0 in stage 89.0 (TID 133)
20:33:29.835 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:29.853 INFO  Executor - Finished task 0.0 in stage 89.0 (TID 133). 1149 bytes result sent to driver
20:33:29.853 INFO  TaskSetManager - Finished task 0.0 in stage 89.0 (TID 133) in 49 ms on localhost (executor driver) (1/1)
20:33:29.853 INFO  TaskSchedulerImpl - Removed TaskSet 89.0, whose tasks have all completed, from pool 
20:33:29.854 INFO  DAGScheduler - ShuffleMapStage 89 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.068 s
20:33:29.854 INFO  DAGScheduler - looking for newly runnable stages
20:33:29.854 INFO  DAGScheduler - running: HashSet()
20:33:29.854 INFO  DAGScheduler - waiting: HashSet(ResultStage 90)
20:33:29.854 INFO  DAGScheduler - failed: HashSet()
20:33:29.854 INFO  DAGScheduler - Submitting ResultStage 90 (MapPartitionsRDD[389] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:29.860 INFO  MemoryStore - Block broadcast_164 stored as values in memory (estimated size 150.2 KiB, free 1917.0 MiB)
20:33:29.861 INFO  MemoryStore - Block broadcast_164_piece0 stored as bytes in memory (estimated size 56.2 KiB, free 1917.0 MiB)
20:33:29.861 INFO  BlockManagerInfo - Added broadcast_164_piece0 in memory on localhost:45281 (size: 56.2 KiB, free: 1919.4 MiB)
20:33:29.862 INFO  SparkContext - Created broadcast 164 from broadcast at DAGScheduler.scala:1580
20:33:29.862 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 90 (MapPartitionsRDD[389] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.862 INFO  TaskSchedulerImpl - Adding task set 90.0 with 2 tasks resource profile 0
20:33:29.863 INFO  TaskSetManager - Starting task 0.0 in stage 90.0 (TID 134) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.863 INFO  TaskSetManager - Starting task 1.0 in stage 90.0 (TID 135) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.863 INFO  Executor - Running task 0.0 in stage 90.0 (TID 134)
20:33:29.863 INFO  Executor - Running task 1.0 in stage 90.0 (TID 135)
20:33:29.870 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.870 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.870 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.870 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.870 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.870 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.870 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.870 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.870 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.870 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:29.870 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:29.870 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:29.882 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.882 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.884 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.885 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.889 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033297654478134126746992_0389_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest19760866139169659782.bam/_temporary/0/task_202507152033297654478134126746992_0389_r_000001
20:33:29.890 INFO  SparkHadoopMapRedUtil - attempt_202507152033297654478134126746992_0389_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:29.891 INFO  Executor - Finished task 1.0 in stage 90.0 (TID 135). 1729 bytes result sent to driver
20:33:29.891 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033297654478134126746992_0389_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest19760866139169659782.bam/_temporary/0/task_202507152033297654478134126746992_0389_r_000000
20:33:29.891 INFO  SparkHadoopMapRedUtil - attempt_202507152033297654478134126746992_0389_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:29.891 INFO  TaskSetManager - Finished task 1.0 in stage 90.0 (TID 135) in 28 ms on localhost (executor driver) (1/2)
20:33:29.892 INFO  Executor - Finished task 0.0 in stage 90.0 (TID 134). 1729 bytes result sent to driver
20:33:29.892 INFO  TaskSetManager - Finished task 0.0 in stage 90.0 (TID 134) in 30 ms on localhost (executor driver) (2/2)
20:33:29.892 INFO  TaskSchedulerImpl - Removed TaskSet 90.0, whose tasks have all completed, from pool 
20:33:29.892 INFO  DAGScheduler - ResultStage 90 (runJob at SparkHadoopWriter.scala:83) finished in 0.038 s
20:33:29.892 INFO  DAGScheduler - Job 65 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.892 INFO  TaskSchedulerImpl - Killing all running tasks in stage 90: Stage finished
20:33:29.892 INFO  DAGScheduler - Job 65 finished: runJob at SparkHadoopWriter.scala:83, took 0.108238 s
20:33:29.893 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033297654478134126746992_0389.
20:33:29.898 INFO  SparkHadoopWriter - Write Job job_202507152033297654478134126746992_0389 committed. Elapsed time: 5 ms.
20:33:29.901 INFO  MemoryStore - Block broadcast_165 stored as values in memory (estimated size 297.9 KiB, free 1916.7 MiB)
20:33:29.912 INFO  MemoryStore - Block broadcast_165_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:29.912 INFO  BlockManagerInfo - Added broadcast_165_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:29.912 INFO  SparkContext - Created broadcast 165 from newAPIHadoopFile at PathSplitSource.java:96
20:33:29.939 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.940 INFO  DAGScheduler - Got job 66 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.940 INFO  DAGScheduler - Final stage: ResultStage 92 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.940 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 91)
20:33:29.940 INFO  DAGScheduler - Missing parents: List()
20:33:29.940 INFO  DAGScheduler - Submitting ResultStage 92 (MapPartitionsRDD[380] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:29.941 INFO  MemoryStore - Block broadcast_166 stored as values in memory (estimated size 6.3 KiB, free 1916.6 MiB)
20:33:29.942 INFO  MemoryStore - Block broadcast_166_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1916.6 MiB)
20:33:29.942 INFO  BlockManagerInfo - Added broadcast_166_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.4 MiB)
20:33:29.942 INFO  SparkContext - Created broadcast 166 from broadcast at DAGScheduler.scala:1580
20:33:29.942 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 92 (MapPartitionsRDD[380] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.942 INFO  TaskSchedulerImpl - Adding task set 92.0 with 2 tasks resource profile 0
20:33:29.943 INFO  TaskSetManager - Starting task 0.0 in stage 92.0 (TID 136) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:29.943 INFO  TaskSetManager - Starting task 1.0 in stage 92.0 (TID 137) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:29.944 INFO  Executor - Running task 0.0 in stage 92.0 (TID 136)
20:33:29.944 INFO  Executor - Running task 1.0 in stage 92.0 (TID 137)
20:33:29.945 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.945 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:29.945 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.945 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:29.949 INFO  Executor - Finished task 0.0 in stage 92.0 (TID 136). 1591 bytes result sent to driver
20:33:29.949 INFO  TaskSetManager - Finished task 0.0 in stage 92.0 (TID 136) in 6 ms on localhost (executor driver) (1/2)
20:33:29.950 INFO  Executor - Finished task 1.0 in stage 92.0 (TID 137). 1591 bytes result sent to driver
20:33:29.950 INFO  TaskSetManager - Finished task 1.0 in stage 92.0 (TID 137) in 7 ms on localhost (executor driver) (2/2)
20:33:29.950 INFO  TaskSchedulerImpl - Removed TaskSet 92.0, whose tasks have all completed, from pool 
20:33:29.950 INFO  DAGScheduler - ResultStage 92 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.009 s
20:33:29.950 INFO  DAGScheduler - Job 66 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:29.950 INFO  TaskSchedulerImpl - Killing all running tasks in stage 92: Stage finished
20:33:29.950 INFO  DAGScheduler - Job 66 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.010969 s
20:33:29.963 INFO  FileInputFormat - Total input files to process : 2
20:33:29.968 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:29.968 INFO  DAGScheduler - Got job 67 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:29.968 INFO  DAGScheduler - Final stage: ResultStage 93 (count at ReadsSparkSinkUnitTest.java:222)
20:33:29.968 INFO  DAGScheduler - Parents of final stage: List()
20:33:29.968 INFO  DAGScheduler - Missing parents: List()
20:33:29.969 INFO  DAGScheduler - Submitting ResultStage 93 (MapPartitionsRDD[396] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:29.986 INFO  MemoryStore - Block broadcast_167 stored as values in memory (estimated size 426.1 KiB, free 1916.2 MiB)
20:33:29.993 INFO  BlockManagerInfo - Removed broadcast_150_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:29.993 INFO  MemoryStore - Block broadcast_167_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.4 MiB)
20:33:29.994 INFO  BlockManagerInfo - Added broadcast_167_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.3 MiB)
20:33:29.994 INFO  SparkContext - Created broadcast 167 from broadcast at DAGScheduler.scala:1580
20:33:29.994 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 93 (MapPartitionsRDD[396] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:29.994 INFO  TaskSchedulerImpl - Adding task set 93.0 with 2 tasks resource profile 0
20:33:29.995 INFO  BlockManagerInfo - Removed broadcast_162_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:29.995 INFO  TaskSetManager - Starting task 0.0 in stage 93.0 (TID 138) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7826 bytes) 
20:33:29.995 INFO  TaskSetManager - Starting task 1.0 in stage 93.0 (TID 139) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7826 bytes) 
20:33:29.996 INFO  Executor - Running task 1.0 in stage 93.0 (TID 139)
20:33:29.996 INFO  Executor - Running task 0.0 in stage 93.0 (TID 138)
20:33:29.996 INFO  BlockManagerInfo - Removed broadcast_158_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:29.997 INFO  BlockManagerInfo - Removed broadcast_157_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.4 MiB)
20:33:29.997 INFO  BlockManagerInfo - Removed broadcast_164_piece0 on localhost:45281 in memory (size: 56.2 KiB, free: 1919.5 MiB)
20:33:29.998 INFO  BlockManagerInfo - Removed broadcast_161_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:29.999 INFO  BlockManagerInfo - Removed broadcast_160_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:30.000 INFO  BlockManagerInfo - Removed broadcast_163_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.7 MiB)
20:33:30.000 INFO  BlockManagerInfo - Removed broadcast_166_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.7 MiB)
20:33:30.001 INFO  BlockManagerInfo - Removed broadcast_156_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:30.028 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest19760866139169659782.bam/part-r-00000.bam:0+132492
20:33:30.037 INFO  Executor - Finished task 1.0 in stage 93.0 (TID 139). 989 bytes result sent to driver
20:33:30.038 INFO  TaskSetManager - Finished task 1.0 in stage 93.0 (TID 139) in 43 ms on localhost (executor driver) (1/2)
20:33:30.042 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest19760866139169659782.bam/part-r-00001.bam:0+129330
20:33:30.055 INFO  Executor - Finished task 0.0 in stage 93.0 (TID 138). 989 bytes result sent to driver
20:33:30.055 INFO  TaskSetManager - Finished task 0.0 in stage 93.0 (TID 138) in 60 ms on localhost (executor driver) (2/2)
20:33:30.055 INFO  TaskSchedulerImpl - Removed TaskSet 93.0, whose tasks have all completed, from pool 
20:33:30.055 INFO  DAGScheduler - ResultStage 93 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.086 s
20:33:30.056 INFO  DAGScheduler - Job 67 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.056 INFO  TaskSchedulerImpl - Killing all running tasks in stage 93: Stage finished
20:33:30.056 INFO  DAGScheduler - Job 67 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.087749 s
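The "count at ReadsSparkSinkUnitTest.java:222" jobs that bracket each write are the round-trip check: the test counts the reads it loaded and the reads it gets back from the sink and expects the totals to match. A minimal, self-contained sketch of that pattern, using stand-in string RDDs rather than GATK's read types (all names below are illustrative assumptions, not the test's actual code):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;

public class CountRoundTripSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("count-round-trip").setMaster("local[2]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Stand-in for the reads loaded from the original BAM.
            JavaRDD<String> original = sc.parallelize(Arrays.asList("r1", "r2", "r3"), 2);
            // Stand-in for the reads read back from the sink's output directory.
            JavaRDD<String> roundTripped = sc.parallelize(Arrays.asList("r1", "r2", "r3"), 2);
            // These two actions are the kind of job the log reports as
            // "count at ReadsSparkSinkUnitTest.java:222".
            long expected = original.count();
            long actual = roundTripped.count();
            if (expected != actual) {
                throw new AssertionError("read counts differ: " + expected + " vs " + actual);
            }
        }
    }
}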
20:33:30.059 INFO  MemoryStore - Block broadcast_168 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:30.065 INFO  MemoryStore - Block broadcast_168_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:30.066 INFO  BlockManagerInfo - Added broadcast_168_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:30.066 INFO  SparkContext - Created broadcast 168 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.090 INFO  MemoryStore - Block broadcast_169 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:30.096 INFO  MemoryStore - Block broadcast_169_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.1 MiB)
20:33:30.096 INFO  BlockManagerInfo - Added broadcast_169_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:30.097 INFO  SparkContext - Created broadcast 169 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.117 INFO  MemoryStore - Block broadcast_170 stored as values in memory (estimated size 160.7 KiB, free 1917.9 MiB)
20:33:30.118 INFO  MemoryStore - Block broadcast_170_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.9 MiB)
20:33:30.118 INFO  BlockManagerInfo - Added broadcast_170_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:30.118 INFO  SparkContext - Created broadcast 170 from broadcast at ReadsSparkSink.java:133
20:33:30.120 INFO  MemoryStore - Block broadcast_171 stored as values in memory (estimated size 163.2 KiB, free 1917.7 MiB)
20:33:30.120 INFO  MemoryStore - Block broadcast_171_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:30.120 INFO  BlockManagerInfo - Added broadcast_171_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:30.121 INFO  SparkContext - Created broadcast 171 from broadcast at AnySamSinkMultiple.java:80
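The "Created broadcast ... from broadcast at ReadsSparkSink.java:133" and "... at AnySamSinkMultiple.java:80" lines record small read-only objects being broadcast once so that every writer task can consult them without shipping a per-task copy. A hedged sketch of that broadcast pattern, with a plain string standing in for whatever GATK actually broadcasts here:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import java.util.Arrays;

public class HeaderBroadcastSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("header-broadcast").setMaster("local[2]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // A small read-only value, broadcast once and read by every task.
            String header = "@HD\tVN:1.6\tSO:coordinate";   // stand-in for a SAM header
            Broadcast<String> headerBroadcast = sc.broadcast(header);
            long kept = sc.parallelize(Arrays.asList("r1", "r2", "r3", "r4"), 2)
                    .filter(read -> headerBroadcast.value().startsWith("@HD"))  // tasks read the broadcast
                    .count();
            System.out.println("reads processed with header available: " + kept);
        }
    }
}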
20:33:30.122 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.122 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.122 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.134 INFO  FileInputFormat - Total input files to process : 1
20:33:30.141 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:30.141 INFO  DAGScheduler - Registering RDD 404 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 20
20:33:30.142 INFO  DAGScheduler - Got job 68 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:30.142 INFO  DAGScheduler - Final stage: ResultStage 95 (runJob at SparkHadoopWriter.scala:83)
20:33:30.142 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 94)
20:33:30.142 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 94)
20:33:30.142 INFO  DAGScheduler - Submitting ShuffleMapStage 94 (MapPartitionsRDD[404] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.161 INFO  MemoryStore - Block broadcast_172 stored as values in memory (estimated size 427.7 KiB, free 1917.3 MiB)
20:33:30.162 INFO  MemoryStore - Block broadcast_172_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1917.2 MiB)
20:33:30.162 INFO  BlockManagerInfo - Added broadcast_172_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.5 MiB)
20:33:30.163 INFO  SparkContext - Created broadcast 172 from broadcast at DAGScheduler.scala:1580
20:33:30.163 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 94 (MapPartitionsRDD[404] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:30.163 INFO  TaskSchedulerImpl - Adding task set 94.0 with 1 tasks resource profile 0
20:33:30.164 INFO  TaskSetManager - Starting task 0.0 in stage 94.0 (TID 140) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:30.164 INFO  Executor - Running task 0.0 in stage 94.0 (TID 140)
20:33:30.194 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:30.211 INFO  Executor - Finished task 0.0 in stage 94.0 (TID 140). 1149 bytes result sent to driver
20:33:30.211 INFO  TaskSetManager - Finished task 0.0 in stage 94.0 (TID 140) in 48 ms on localhost (executor driver) (1/1)
20:33:30.211 INFO  TaskSchedulerImpl - Removed TaskSet 94.0, whose tasks have all completed, from pool 
20:33:30.211 INFO  DAGScheduler - ShuffleMapStage 94 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.069 s
20:33:30.211 INFO  DAGScheduler - looking for newly runnable stages
20:33:30.212 INFO  DAGScheduler - running: HashSet()
20:33:30.212 INFO  DAGScheduler - waiting: HashSet(ResultStage 95)
20:33:30.212 INFO  DAGScheduler - failed: HashSet()
20:33:30.212 INFO  DAGScheduler - Submitting ResultStage 95 (MapPartitionsRDD[416] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:30.218 INFO  MemoryStore - Block broadcast_173 stored as values in memory (estimated size 150.2 KiB, free 1917.0 MiB)
20:33:30.219 INFO  MemoryStore - Block broadcast_173_piece0 stored as bytes in memory (estimated size 56.3 KiB, free 1917.0 MiB)
20:33:30.219 INFO  BlockManagerInfo - Added broadcast_173_piece0 in memory on localhost:45281 (size: 56.3 KiB, free: 1919.4 MiB)
20:33:30.219 INFO  SparkContext - Created broadcast 173 from broadcast at DAGScheduler.scala:1580
20:33:30.220 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 95 (MapPartitionsRDD[416] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.220 INFO  TaskSchedulerImpl - Adding task set 95.0 with 2 tasks resource profile 0
20:33:30.220 INFO  TaskSetManager - Starting task 0.0 in stage 95.0 (TID 141) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.220 INFO  TaskSetManager - Starting task 1.0 in stage 95.0 (TID 142) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.221 INFO  Executor - Running task 0.0 in stage 95.0 (TID 141)
20:33:30.221 INFO  Executor - Running task 1.0 in stage 95.0 (TID 142)
20:33:30.225 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.225 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.225 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.226 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.226 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.226 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.227 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.227 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.227 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.227 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.227 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.227 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.238 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.238 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.239 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.239 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.245 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033303716978399463255936_0416_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest115208765742964097063.bam/_temporary/0/task_202507152033303716978399463255936_0416_r_000001
20:33:30.245 INFO  SparkHadoopMapRedUtil - attempt_202507152033303716978399463255936_0416_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:30.246 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033303716978399463255936_0416_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest115208765742964097063.bam/_temporary/0/task_202507152033303716978399463255936_0416_r_000000
20:33:30.246 INFO  SparkHadoopMapRedUtil - attempt_202507152033303716978399463255936_0416_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:30.246 INFO  Executor - Finished task 1.0 in stage 95.0 (TID 142). 1729 bytes result sent to driver
20:33:30.246 INFO  Executor - Finished task 0.0 in stage 95.0 (TID 141). 1729 bytes result sent to driver
20:33:30.246 INFO  TaskSetManager - Finished task 1.0 in stage 95.0 (TID 142) in 26 ms on localhost (executor driver) (1/2)
20:33:30.247 INFO  TaskSetManager - Finished task 0.0 in stage 95.0 (TID 141) in 27 ms on localhost (executor driver) (2/2)
20:33:30.247 INFO  TaskSchedulerImpl - Removed TaskSet 95.0, whose tasks have all completed, from pool 
20:33:30.247 INFO  DAGScheduler - ResultStage 95 (runJob at SparkHadoopWriter.scala:83) finished in 0.035 s
20:33:30.247 INFO  DAGScheduler - Job 68 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.247 INFO  TaskSchedulerImpl - Killing all running tasks in stage 95: Stage finished
20:33:30.247 INFO  DAGScheduler - Job 68 finished: runJob at SparkHadoopWriter.scala:83, took 0.106196 s
20:33:30.247 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033303716978399463255936_0416.
20:33:30.253 INFO  SparkHadoopWriter - Write Job job_202507152033303716978399463255936_0416 committed. Elapsed time: 5 ms.
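The write above follows the standard Hadoop commit protocol: each task writes its partition under _temporary, FileOutputCommitter (algorithm version 1) promotes the task attempt on task commit, and SparkHadoopWriter finalizes the directory on job commit, leaving one part-r-NNNNN.bam file per partition. A minimal sketch of the same mechanics using saveAsTextFile instead of GATK's BAM output format (so the part files come out as part-00000 rather than part-r-00000.bam; the output path is illustrative and must not already exist):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;

public class ShardedWriteSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("sharded-write").setMaster("local[2]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            String outputDir = "/tmp/sharded-write-sketch";   // illustrative; must not already exist
            // Two partitions -> two part files; tasks write under _temporary and the
            // FileOutputCommitter promotes them when the job commits.
            sc.parallelize(Arrays.asList("a", "b", "c", "d"), 2)
              .saveAsTextFile(outputDir);
        }
    }
}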
20:33:30.256 INFO  MemoryStore - Block broadcast_174 stored as values in memory (estimated size 297.9 KiB, free 1916.7 MiB)
20:33:30.266 INFO  MemoryStore - Block broadcast_174_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:30.267 INFO  BlockManagerInfo - Added broadcast_174_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:30.267 INFO  SparkContext - Created broadcast 174 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.296 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:30.297 INFO  DAGScheduler - Got job 69 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:30.297 INFO  DAGScheduler - Final stage: ResultStage 97 (count at ReadsSparkSinkUnitTest.java:222)
20:33:30.297 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 96)
20:33:30.297 INFO  DAGScheduler - Missing parents: List()
20:33:30.297 INFO  DAGScheduler - Submitting ResultStage 97 (MapPartitionsRDD[407] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.298 INFO  MemoryStore - Block broadcast_175 stored as values in memory (estimated size 6.3 KiB, free 1916.6 MiB)
20:33:30.298 INFO  MemoryStore - Block broadcast_175_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1916.6 MiB)
20:33:30.298 INFO  BlockManagerInfo - Added broadcast_175_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.4 MiB)
20:33:30.299 INFO  SparkContext - Created broadcast 175 from broadcast at DAGScheduler.scala:1580
20:33:30.299 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 97 (MapPartitionsRDD[407] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.299 INFO  TaskSchedulerImpl - Adding task set 97.0 with 2 tasks resource profile 0
20:33:30.300 INFO  TaskSetManager - Starting task 0.0 in stage 97.0 (TID 143) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.300 INFO  TaskSetManager - Starting task 1.0 in stage 97.0 (TID 144) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.300 INFO  Executor - Running task 0.0 in stage 97.0 (TID 143)
20:33:30.300 INFO  Executor - Running task 1.0 in stage 97.0 (TID 144)
20:33:30.302 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.302 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.302 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.302 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.306 INFO  Executor - Finished task 1.0 in stage 97.0 (TID 144). 1634 bytes result sent to driver
20:33:30.306 INFO  TaskSetManager - Finished task 1.0 in stage 97.0 (TID 144) in 6 ms on localhost (executor driver) (1/2)
20:33:30.307 INFO  Executor - Finished task 0.0 in stage 97.0 (TID 143). 1634 bytes result sent to driver
20:33:30.307 INFO  TaskSetManager - Finished task 0.0 in stage 97.0 (TID 143) in 7 ms on localhost (executor driver) (2/2)
20:33:30.307 INFO  TaskSchedulerImpl - Removed TaskSet 97.0, whose tasks have all completed, from pool 
20:33:30.307 INFO  DAGScheduler - ResultStage 97 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.010 s
20:33:30.307 INFO  DAGScheduler - Job 69 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.307 INFO  TaskSchedulerImpl - Killing all running tasks in stage 97: Stage finished
20:33:30.307 INFO  DAGScheduler - Job 69 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.011163 s
20:33:30.320 INFO  FileInputFormat - Total input files to process : 2
20:33:30.324 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:30.325 INFO  DAGScheduler - Got job 70 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:30.325 INFO  DAGScheduler - Final stage: ResultStage 98 (count at ReadsSparkSinkUnitTest.java:222)
20:33:30.325 INFO  DAGScheduler - Parents of final stage: List()
20:33:30.325 INFO  DAGScheduler - Missing parents: List()
20:33:30.325 INFO  DAGScheduler - Submitting ResultStage 98 (MapPartitionsRDD[423] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:30.342 INFO  MemoryStore - Block broadcast_176 stored as values in memory (estimated size 426.1 KiB, free 1916.2 MiB)
20:33:30.343 INFO  MemoryStore - Block broadcast_176_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.1 MiB)
20:33:30.343 INFO  BlockManagerInfo - Added broadcast_176_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:30.344 INFO  SparkContext - Created broadcast 176 from broadcast at DAGScheduler.scala:1580
20:33:30.344 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 98 (MapPartitionsRDD[423] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.344 INFO  TaskSchedulerImpl - Adding task set 98.0 with 2 tasks resource profile 0
20:33:30.345 INFO  TaskSetManager - Starting task 0.0 in stage 98.0 (TID 145) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7827 bytes) 
20:33:30.345 INFO  TaskSetManager - Starting task 1.0 in stage 98.0 (TID 146) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7827 bytes) 
20:33:30.345 INFO  Executor - Running task 0.0 in stage 98.0 (TID 145)
20:33:30.345 INFO  Executor - Running task 1.0 in stage 98.0 (TID 146)
20:33:30.377 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest115208765742964097063.bam/part-r-00000.bam:0+132492
20:33:30.381 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest115208765742964097063.bam/part-r-00001.bam:0+129330
20:33:30.387 INFO  Executor - Finished task 1.0 in stage 98.0 (TID 146). 989 bytes result sent to driver
20:33:30.388 INFO  TaskSetManager - Finished task 1.0 in stage 98.0 (TID 146) in 43 ms on localhost (executor driver) (1/2)
20:33:30.394 INFO  Executor - Finished task 0.0 in stage 98.0 (TID 145). 989 bytes result sent to driver
20:33:30.394 INFO  TaskSetManager - Finished task 0.0 in stage 98.0 (TID 145) in 50 ms on localhost (executor driver) (2/2)
20:33:30.394 INFO  TaskSchedulerImpl - Removed TaskSet 98.0, whose tasks have all completed, from pool 
20:33:30.394 INFO  DAGScheduler - ResultStage 98 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.069 s
20:33:30.394 INFO  DAGScheduler - Job 70 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.394 INFO  TaskSchedulerImpl - Killing all running tasks in stage 98: Stage finished
20:33:30.394 INFO  DAGScheduler - Job 70 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.069974 s
20:33:30.397 INFO  MemoryStore - Block broadcast_177 stored as values in memory (estimated size 298.0 KiB, free 1915.8 MiB)
20:33:30.407 INFO  MemoryStore - Block broadcast_177_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1915.7 MiB)
20:33:30.407 INFO  BlockManagerInfo - Added broadcast_177_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.2 MiB)
20:33:30.407 INFO  SparkContext - Created broadcast 177 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.430 INFO  MemoryStore - Block broadcast_178 stored as values in memory (estimated size 298.0 KiB, free 1915.4 MiB)
20:33:30.441 INFO  BlockManagerInfo - Removed broadcast_167_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.3 MiB)
20:33:30.442 INFO  BlockManagerInfo - Removed broadcast_175_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.3 MiB)
20:33:30.443 INFO  BlockManagerInfo - Removed broadcast_165_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:30.444 INFO  BlockManagerInfo - Removed broadcast_172_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.5 MiB)
20:33:30.445 INFO  BlockManagerInfo - Removed broadcast_168_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:30.445 INFO  BlockManagerInfo - Removed broadcast_171_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:30.446 INFO  BlockManagerInfo - Removed broadcast_176_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:30.446 INFO  BlockManagerInfo - Removed broadcast_170_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:30.447 INFO  BlockManagerInfo - Removed broadcast_174_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:30.447 INFO  MemoryStore - Block broadcast_178_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1918.4 MiB)
20:33:30.448 INFO  BlockManagerInfo - Added broadcast_178_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.7 MiB)
20:33:30.448 INFO  BlockManagerInfo - Removed broadcast_159_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:30.448 INFO  SparkContext - Created broadcast 178 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.449 INFO  BlockManagerInfo - Removed broadcast_173_piece0 on localhost:45281 in memory (size: 56.3 KiB, free: 1919.9 MiB)
20:33:30.449 INFO  BlockManagerInfo - Removed broadcast_169_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.9 MiB)
20:33:30.469 INFO  MemoryStore - Block broadcast_179 stored as values in memory (estimated size 160.7 KiB, free 1919.2 MiB)
20:33:30.470 INFO  MemoryStore - Block broadcast_179_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.2 MiB)
20:33:30.470 INFO  BlockManagerInfo - Added broadcast_179_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:30.470 INFO  SparkContext - Created broadcast 179 from broadcast at ReadsSparkSink.java:133
20:33:30.471 INFO  MemoryStore - Block broadcast_180 stored as values in memory (estimated size 163.2 KiB, free 1919.0 MiB)
20:33:30.472 INFO  MemoryStore - Block broadcast_180_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.0 MiB)
20:33:30.472 INFO  BlockManagerInfo - Added broadcast_180_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:30.472 INFO  SparkContext - Created broadcast 180 from broadcast at AnySamSinkMultiple.java:80
20:33:30.475 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.475 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.475 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.487 INFO  FileInputFormat - Total input files to process : 1
20:33:30.493 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:30.493 INFO  DAGScheduler - Registering RDD 431 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 21
20:33:30.494 INFO  DAGScheduler - Got job 71 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:30.494 INFO  DAGScheduler - Final stage: ResultStage 100 (runJob at SparkHadoopWriter.scala:83)
20:33:30.494 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 99)
20:33:30.494 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 99)
20:33:30.494 INFO  DAGScheduler - Submitting ShuffleMapStage 99 (MapPartitionsRDD[431] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.511 INFO  MemoryStore - Block broadcast_181 stored as values in memory (estimated size 427.7 KiB, free 1918.6 MiB)
20:33:30.512 INFO  MemoryStore - Block broadcast_181_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1918.4 MiB)
20:33:30.512 INFO  BlockManagerInfo - Added broadcast_181_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.7 MiB)
20:33:30.512 INFO  SparkContext - Created broadcast 181 from broadcast at DAGScheduler.scala:1580
20:33:30.512 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 99 (MapPartitionsRDD[431] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:30.513 INFO  TaskSchedulerImpl - Adding task set 99.0 with 1 tasks resource profile 0
20:33:30.513 INFO  TaskSetManager - Starting task 0.0 in stage 99.0 (TID 147) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
20:33:30.514 INFO  Executor - Running task 0.0 in stage 99.0 (TID 147)
20:33:30.544 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:30.570 INFO  Executor - Finished task 0.0 in stage 99.0 (TID 147). 1149 bytes result sent to driver
20:33:30.570 INFO  TaskSetManager - Finished task 0.0 in stage 99.0 (TID 147) in 57 ms on localhost (executor driver) (1/1)
20:33:30.570 INFO  TaskSchedulerImpl - Removed TaskSet 99.0, whose tasks have all completed, from pool 
20:33:30.570 INFO  DAGScheduler - ShuffleMapStage 99 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.076 s
20:33:30.570 INFO  DAGScheduler - looking for newly runnable stages
20:33:30.570 INFO  DAGScheduler - running: HashSet()
20:33:30.571 INFO  DAGScheduler - waiting: HashSet(ResultStage 100)
20:33:30.571 INFO  DAGScheduler - failed: HashSet()
20:33:30.571 INFO  DAGScheduler - Submitting ResultStage 100 (MapPartitionsRDD[443] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:30.577 INFO  MemoryStore - Block broadcast_182 stored as values in memory (estimated size 150.2 KiB, free 1918.3 MiB)
20:33:30.578 INFO  MemoryStore - Block broadcast_182_piece0 stored as bytes in memory (estimated size 56.2 KiB, free 1918.2 MiB)
20:33:30.578 INFO  BlockManagerInfo - Added broadcast_182_piece0 in memory on localhost:45281 (size: 56.2 KiB, free: 1919.7 MiB)
20:33:30.578 INFO  SparkContext - Created broadcast 182 from broadcast at DAGScheduler.scala:1580
20:33:30.579 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 100 (MapPartitionsRDD[443] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.579 INFO  TaskSchedulerImpl - Adding task set 100.0 with 2 tasks resource profile 0
20:33:30.579 INFO  TaskSetManager - Starting task 0.0 in stage 100.0 (TID 148) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.579 INFO  TaskSetManager - Starting task 1.0 in stage 100.0 (TID 149) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.580 INFO  Executor - Running task 0.0 in stage 100.0 (TID 148)
20:33:30.580 INFO  Executor - Running task 1.0 in stage 100.0 (TID 149)
20:33:30.584 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.584 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.584 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.584 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.584 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.584 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.586 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.586 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.586 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.586 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.586 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.586 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.597 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.597 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.598 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.598 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.604 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033309184699013027991769_0443_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest217716857072560998002.bam/_temporary/0/task_202507152033309184699013027991769_0443_r_000001
20:33:30.605 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033309184699013027991769_0443_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest217716857072560998002.bam/_temporary/0/task_202507152033309184699013027991769_0443_r_000000
20:33:30.605 INFO  SparkHadoopMapRedUtil - attempt_202507152033309184699013027991769_0443_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:30.605 INFO  SparkHadoopMapRedUtil - attempt_202507152033309184699013027991769_0443_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:30.605 INFO  Executor - Finished task 1.0 in stage 100.0 (TID 149). 1729 bytes result sent to driver
20:33:30.605 INFO  Executor - Finished task 0.0 in stage 100.0 (TID 148). 1729 bytes result sent to driver
20:33:30.606 INFO  TaskSetManager - Finished task 0.0 in stage 100.0 (TID 148) in 27 ms on localhost (executor driver) (1/2)
20:33:30.606 INFO  TaskSetManager - Finished task 1.0 in stage 100.0 (TID 149) in 27 ms on localhost (executor driver) (2/2)
20:33:30.606 INFO  TaskSchedulerImpl - Removed TaskSet 100.0, whose tasks have all completed, from pool 
20:33:30.606 INFO  DAGScheduler - ResultStage 100 (runJob at SparkHadoopWriter.scala:83) finished in 0.035 s
20:33:30.606 INFO  DAGScheduler - Job 71 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.606 INFO  TaskSchedulerImpl - Killing all running tasks in stage 100: Stage finished
20:33:30.606 INFO  DAGScheduler - Job 71 finished: runJob at SparkHadoopWriter.scala:83, took 0.113251 s
20:33:30.607 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033309184699013027991769_0443.
20:33:30.612 INFO  SparkHadoopWriter - Write Job job_202507152033309184699013027991769_0443 committed. Elapsed time: 5 ms.
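Each of these write jobs runs as two stages because the repartition at ReadsSparkSinkUnitTest.java:210 introduces a shuffle: a ShuffleMapStage writes the shuffle blocks, and the following ResultStage fetches them (the ShuffleBlockFetcherIterator lines, all local here since everything runs on the driver) and performs the actual output. A hedged sketch of that two-stage shape, with toy data in place of reads:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;

public class RepartitionStagesSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("repartition-stages").setMaster("local[2]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<Integer> reads = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6), 1);
            // repartition forces a shuffle: the map side runs as a ShuffleMapStage,
            // and the action below runs as a ResultStage that fetches the shuffle blocks.
            long n = reads.repartition(2).count();
            System.out.println("count after repartition: " + n);
        }
    }
}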
20:33:30.615 INFO  MemoryStore - Block broadcast_183 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:30.626 INFO  MemoryStore - Block broadcast_183_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:30.626 INFO  BlockManagerInfo - Added broadcast_183_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:30.626 INFO  SparkContext - Created broadcast 183 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.652 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:30.652 INFO  DAGScheduler - Got job 72 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:30.652 INFO  DAGScheduler - Final stage: ResultStage 102 (count at ReadsSparkSinkUnitTest.java:222)
20:33:30.652 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 101)
20:33:30.652 INFO  DAGScheduler - Missing parents: List()
20:33:30.653 INFO  DAGScheduler - Submitting ResultStage 102 (MapPartitionsRDD[434] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.653 INFO  MemoryStore - Block broadcast_184 stored as values in memory (estimated size 6.3 KiB, free 1917.9 MiB)
20:33:30.654 INFO  MemoryStore - Block broadcast_184_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1917.9 MiB)
20:33:30.654 INFO  BlockManagerInfo - Added broadcast_184_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.6 MiB)
20:33:30.654 INFO  SparkContext - Created broadcast 184 from broadcast at DAGScheduler.scala:1580
20:33:30.654 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 102 (MapPartitionsRDD[434] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.654 INFO  TaskSchedulerImpl - Adding task set 102.0 with 2 tasks resource profile 0
20:33:30.655 INFO  TaskSetManager - Starting task 0.0 in stage 102.0 (TID 150) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.655 INFO  TaskSetManager - Starting task 1.0 in stage 102.0 (TID 151) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.656 INFO  Executor - Running task 1.0 in stage 102.0 (TID 151)
20:33:30.656 INFO  Executor - Running task 0.0 in stage 102.0 (TID 150)
20:33:30.657 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.657 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.657 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.657 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.661 INFO  Executor - Finished task 0.0 in stage 102.0 (TID 150). 1634 bytes result sent to driver
20:33:30.661 INFO  TaskSetManager - Finished task 0.0 in stage 102.0 (TID 150) in 6 ms on localhost (executor driver) (1/2)
20:33:30.661 INFO  Executor - Finished task 1.0 in stage 102.0 (TID 151). 1634 bytes result sent to driver
20:33:30.662 INFO  TaskSetManager - Finished task 1.0 in stage 102.0 (TID 151) in 7 ms on localhost (executor driver) (2/2)
20:33:30.662 INFO  TaskSchedulerImpl - Removed TaskSet 102.0, whose tasks have all completed, from pool 
20:33:30.662 INFO  DAGScheduler - ResultStage 102 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.009 s
20:33:30.662 INFO  DAGScheduler - Job 72 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.662 INFO  TaskSchedulerImpl - Killing all running tasks in stage 102: Stage finished
20:33:30.662 INFO  DAGScheduler - Job 72 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.010103 s
20:33:30.675 INFO  FileInputFormat - Total input files to process : 2
20:33:30.680 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:30.680 INFO  DAGScheduler - Got job 73 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:30.680 INFO  DAGScheduler - Final stage: ResultStage 103 (count at ReadsSparkSinkUnitTest.java:222)
20:33:30.680 INFO  DAGScheduler - Parents of final stage: List()
20:33:30.680 INFO  DAGScheduler - Missing parents: List()
20:33:30.680 INFO  DAGScheduler - Submitting ResultStage 103 (MapPartitionsRDD[450] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:30.698 INFO  MemoryStore - Block broadcast_185 stored as values in memory (estimated size 426.1 KiB, free 1917.4 MiB)
20:33:30.699 INFO  MemoryStore - Block broadcast_185_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.3 MiB)
20:33:30.699 INFO  BlockManagerInfo - Added broadcast_185_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:30.699 INFO  SparkContext - Created broadcast 185 from broadcast at DAGScheduler.scala:1580
20:33:30.700 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 103 (MapPartitionsRDD[450] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.700 INFO  TaskSchedulerImpl - Adding task set 103.0 with 2 tasks resource profile 0
20:33:30.700 INFO  TaskSetManager - Starting task 0.0 in stage 103.0 (TID 152) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7827 bytes) 
20:33:30.700 INFO  TaskSetManager - Starting task 1.0 in stage 103.0 (TID 153) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7827 bytes) 
20:33:30.701 INFO  Executor - Running task 0.0 in stage 103.0 (TID 152)
20:33:30.701 INFO  Executor - Running task 1.0 in stage 103.0 (TID 153)
20:33:30.730 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest217716857072560998002.bam/part-r-00000.bam:0+129755
20:33:30.731 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest217716857072560998002.bam/part-r-00001.bam:0+129440
20:33:30.740 INFO  Executor - Finished task 0.0 in stage 103.0 (TID 152). 989 bytes result sent to driver
20:33:30.740 INFO  Executor - Finished task 1.0 in stage 103.0 (TID 153). 989 bytes result sent to driver
20:33:30.740 INFO  TaskSetManager - Finished task 0.0 in stage 103.0 (TID 152) in 40 ms on localhost (executor driver) (1/2)
20:33:30.740 INFO  TaskSetManager - Finished task 1.0 in stage 103.0 (TID 153) in 40 ms on localhost (executor driver) (2/2)
20:33:30.741 INFO  TaskSchedulerImpl - Removed TaskSet 103.0, whose tasks have all completed, from pool 
20:33:30.741 INFO  DAGScheduler - ResultStage 103 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.060 s
20:33:30.741 INFO  DAGScheduler - Job 73 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.741 INFO  TaskSchedulerImpl - Killing all running tasks in stage 103: Stage finished
20:33:30.741 INFO  DAGScheduler - Job 73 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.061433 s
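On the read-back side, FileInputFormat reports "Total input files to process : 2" because the output directory holds one part file per partition, and NewHadoopRDD then assigns one input split per part. A hedged sketch of reading a whole sharded output directory back as a single RDD, using plain text input rather than GATK's BAM input format (the path is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class ReadBackShardsSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("read-back-shards").setMaster("local[2]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Pointing at the output directory picks up every part file inside it,
            // one input split (and so one partition) per part file.
            JavaPairRDD<LongWritable, Text> parts = sc.newAPIHadoopFile(
                    "/tmp/sharded-write-sketch",          // illustrative path
                    TextInputFormat.class, LongWritable.class, Text.class,
                    new Configuration());
            System.out.println("records read back: " + parts.count());
        }
    }
}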
20:33:30.744 INFO  MemoryStore - Block broadcast_186 stored as values in memory (estimated size 298.0 KiB, free 1917.0 MiB)
20:33:30.750 INFO  MemoryStore - Block broadcast_186_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.0 MiB)
20:33:30.750 INFO  BlockManagerInfo - Added broadcast_186_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:30.750 INFO  SparkContext - Created broadcast 186 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.773 INFO  MemoryStore - Block broadcast_187 stored as values in memory (estimated size 298.0 KiB, free 1916.7 MiB)
20:33:30.779 INFO  MemoryStore - Block broadcast_187_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:30.779 INFO  BlockManagerInfo - Added broadcast_187_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:30.780 INFO  SparkContext - Created broadcast 187 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.803 INFO  MemoryStore - Block broadcast_188 stored as values in memory (estimated size 19.6 KiB, free 1916.6 MiB)
20:33:30.804 INFO  MemoryStore - Block broadcast_188_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.6 MiB)
20:33:30.804 INFO  BlockManagerInfo - Added broadcast_188_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.4 MiB)
20:33:30.804 INFO  SparkContext - Created broadcast 188 from broadcast at ReadsSparkSink.java:133
20:33:30.805 INFO  MemoryStore - Block broadcast_189 stored as values in memory (estimated size 20.0 KiB, free 1916.6 MiB)
20:33:30.805 INFO  MemoryStore - Block broadcast_189_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.6 MiB)
20:33:30.806 INFO  BlockManagerInfo - Added broadcast_189_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.4 MiB)
20:33:30.806 INFO  SparkContext - Created broadcast 189 from broadcast at AnySamSinkMultiple.java:80
20:33:30.807 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.807 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.807 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.820 INFO  FileInputFormat - Total input files to process : 1
20:33:30.826 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:30.827 INFO  DAGScheduler - Registering RDD 458 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 22
20:33:30.827 INFO  DAGScheduler - Got job 74 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:30.827 INFO  DAGScheduler - Final stage: ResultStage 105 (runJob at SparkHadoopWriter.scala:83)
20:33:30.827 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 104)
20:33:30.827 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 104)
20:33:30.827 INFO  DAGScheduler - Submitting ShuffleMapStage 104 (MapPartitionsRDD[458] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.856 INFO  MemoryStore - Block broadcast_190 stored as values in memory (estimated size 427.7 KiB, free 1916.2 MiB)
20:33:30.865 INFO  BlockManagerInfo - Removed broadcast_180_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:30.866 INFO  BlockManagerInfo - Removed broadcast_184_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.4 MiB)
20:33:30.866 INFO  MemoryStore - Block broadcast_190_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1916.2 MiB)
20:33:30.866 INFO  BlockManagerInfo - Added broadcast_190_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.2 MiB)
20:33:30.866 INFO  SparkContext - Created broadcast 190 from broadcast at DAGScheduler.scala:1580
20:33:30.866 INFO  BlockManagerInfo - Removed broadcast_182_piece0 on localhost:45281 in memory (size: 56.2 KiB, free: 1919.3 MiB)
20:33:30.867 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 104 (MapPartitionsRDD[458] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:30.867 INFO  TaskSchedulerImpl - Adding task set 104.0 with 1 tasks resource profile 0
20:33:30.867 INFO  BlockManagerInfo - Removed broadcast_185_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:30.867 INFO  TaskSetManager - Starting task 0.0 in stage 104.0 (TID 154) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7882 bytes) 
20:33:30.868 INFO  Executor - Running task 0.0 in stage 104.0 (TID 154)
20:33:30.868 INFO  BlockManagerInfo - Removed broadcast_187_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:30.869 INFO  BlockManagerInfo - Removed broadcast_183_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:30.869 INFO  BlockManagerInfo - Removed broadcast_181_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.7 MiB)
20:33:30.870 INFO  BlockManagerInfo - Removed broadcast_177_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.7 MiB)
20:33:30.871 INFO  BlockManagerInfo - Removed broadcast_178_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.8 MiB)
20:33:30.871 INFO  BlockManagerInfo - Removed broadcast_179_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:30.899 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:30.913 INFO  Executor - Finished task 0.0 in stage 104.0 (TID 154). 1149 bytes result sent to driver
20:33:30.913 INFO  TaskSetManager - Finished task 0.0 in stage 104.0 (TID 154) in 46 ms on localhost (executor driver) (1/1)
20:33:30.913 INFO  TaskSchedulerImpl - Removed TaskSet 104.0, whose tasks have all completed, from pool 
20:33:30.914 INFO  DAGScheduler - ShuffleMapStage 104 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.087 s
20:33:30.914 INFO  DAGScheduler - looking for newly runnable stages
20:33:30.914 INFO  DAGScheduler - running: HashSet()
20:33:30.914 INFO  DAGScheduler - waiting: HashSet(ResultStage 105)
20:33:30.914 INFO  DAGScheduler - failed: HashSet()
20:33:30.914 INFO  DAGScheduler - Submitting ResultStage 105 (MapPartitionsRDD[470] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:30.920 INFO  MemoryStore - Block broadcast_191 stored as values in memory (estimated size 150.2 KiB, free 1918.9 MiB)
20:33:30.921 INFO  MemoryStore - Block broadcast_191_piece0 stored as bytes in memory (estimated size 56.2 KiB, free 1918.8 MiB)
20:33:30.921 INFO  BlockManagerInfo - Added broadcast_191_piece0 in memory on localhost:45281 (size: 56.2 KiB, free: 1919.7 MiB)
20:33:30.922 INFO  SparkContext - Created broadcast 191 from broadcast at DAGScheduler.scala:1580
20:33:30.922 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 105 (MapPartitionsRDD[470] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.922 INFO  TaskSchedulerImpl - Adding task set 105.0 with 2 tasks resource profile 0
20:33:30.923 INFO  TaskSetManager - Starting task 0.0 in stage 105.0 (TID 155) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.923 INFO  TaskSetManager - Starting task 1.0 in stage 105.0 (TID 156) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.923 INFO  Executor - Running task 1.0 in stage 105.0 (TID 156)
20:33:30.923 INFO  Executor - Running task 0.0 in stage 105.0 (TID 155)
20:33:30.927 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.927 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.927 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.927 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.927 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.927 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.929 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.929 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.929 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.929 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:30.929 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:30.929 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:30.938 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.938 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.941 INFO  ShuffleBlockFetcherIterator - Getting 1 (160.4 KiB) non-empty blocks including 1 (160.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.941 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.946 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203330624085503955508680_0470_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest315075463562183185386.bam/_temporary/0/task_20250715203330624085503955508680_0470_r_000000
20:33:30.946 INFO  SparkHadoopMapRedUtil - attempt_20250715203330624085503955508680_0470_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:30.947 INFO  Executor - Finished task 0.0 in stage 105.0 (TID 155). 1729 bytes result sent to driver
20:33:30.947 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203330624085503955508680_0470_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest315075463562183185386.bam/_temporary/0/task_20250715203330624085503955508680_0470_r_000001
20:33:30.947 INFO  SparkHadoopMapRedUtil - attempt_20250715203330624085503955508680_0470_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:30.947 INFO  Executor - Finished task 1.0 in stage 105.0 (TID 156). 1729 bytes result sent to driver
20:33:30.947 INFO  TaskSetManager - Finished task 1.0 in stage 105.0 (TID 156) in 24 ms on localhost (executor driver) (1/2)
20:33:30.948 INFO  TaskSetManager - Finished task 0.0 in stage 105.0 (TID 155) in 25 ms on localhost (executor driver) (2/2)
20:33:30.948 INFO  TaskSchedulerImpl - Removed TaskSet 105.0, whose tasks have all completed, from pool 
20:33:30.948 INFO  DAGScheduler - ResultStage 105 (runJob at SparkHadoopWriter.scala:83) finished in 0.034 s
20:33:30.948 INFO  DAGScheduler - Job 74 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.948 INFO  TaskSchedulerImpl - Killing all running tasks in stage 105: Stage finished
20:33:30.948 INFO  DAGScheduler - Job 74 finished: runJob at SparkHadoopWriter.scala:83, took 0.121932 s
20:33:30.949 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203330624085503955508680_0470.
20:33:30.954 INFO  SparkHadoopWriter - Write Job job_20250715203330624085503955508680_0470 committed. Elapsed time: 5 ms.
20:33:30.956 INFO  MemoryStore - Block broadcast_192 stored as values in memory (estimated size 297.9 KiB, free 1918.6 MiB)
20:33:30.963 INFO  MemoryStore - Block broadcast_192_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.5 MiB)
20:33:30.963 INFO  BlockManagerInfo - Added broadcast_192_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:30.963 INFO  SparkContext - Created broadcast 192 from newAPIHadoopFile at PathSplitSource.java:96
20:33:30.987 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:30.987 INFO  DAGScheduler - Got job 75 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:30.987 INFO  DAGScheduler - Final stage: ResultStage 107 (count at ReadsSparkSinkUnitTest.java:222)
20:33:30.987 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 106)
20:33:30.987 INFO  DAGScheduler - Missing parents: List()
20:33:30.988 INFO  DAGScheduler - Submitting ResultStage 107 (MapPartitionsRDD[461] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:30.989 INFO  MemoryStore - Block broadcast_193 stored as values in memory (estimated size 6.3 KiB, free 1918.5 MiB)
20:33:30.989 INFO  MemoryStore - Block broadcast_193_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1918.5 MiB)
20:33:30.989 INFO  BlockManagerInfo - Added broadcast_193_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.7 MiB)
20:33:30.989 INFO  SparkContext - Created broadcast 193 from broadcast at DAGScheduler.scala:1580
20:33:30.990 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 107 (MapPartitionsRDD[461] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:30.990 INFO  TaskSchedulerImpl - Adding task set 107.0 with 2 tasks resource profile 0
20:33:30.990 INFO  TaskSetManager - Starting task 0.0 in stage 107.0 (TID 157) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:30.991 INFO  TaskSetManager - Starting task 1.0 in stage 107.0 (TID 158) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:30.991 INFO  Executor - Running task 1.0 in stage 107.0 (TID 158)
20:33:30.991 INFO  Executor - Running task 0.0 in stage 107.0 (TID 157)
20:33:30.993 INFO  ShuffleBlockFetcherIterator - Getting 1 (160.4 KiB) non-empty blocks including 1 (160.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.993 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:30.993 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.993 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:30.997 INFO  Executor - Finished task 0.0 in stage 107.0 (TID 157). 1634 bytes result sent to driver
20:33:30.997 INFO  Executor - Finished task 1.0 in stage 107.0 (TID 158). 1634 bytes result sent to driver
20:33:30.997 INFO  TaskSetManager - Finished task 1.0 in stage 107.0 (TID 158) in 7 ms on localhost (executor driver) (1/2)
20:33:30.998 INFO  TaskSetManager - Finished task 0.0 in stage 107.0 (TID 157) in 8 ms on localhost (executor driver) (2/2)
20:33:30.998 INFO  TaskSchedulerImpl - Removed TaskSet 107.0, whose tasks have all completed, from pool 
20:33:30.998 INFO  DAGScheduler - ResultStage 107 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.010 s
20:33:30.998 INFO  DAGScheduler - Job 75 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:30.998 INFO  TaskSchedulerImpl - Killing all running tasks in stage 107: Stage finished
20:33:30.998 INFO  DAGScheduler - Job 75 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.011278 s
20:33:31.012 INFO  FileInputFormat - Total input files to process : 2
20:33:31.015 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:31.016 INFO  DAGScheduler - Got job 76 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:31.016 INFO  DAGScheduler - Final stage: ResultStage 108 (count at ReadsSparkSinkUnitTest.java:222)
20:33:31.016 INFO  DAGScheduler - Parents of final stage: List()
20:33:31.016 INFO  DAGScheduler - Missing parents: List()
20:33:31.016 INFO  DAGScheduler - Submitting ResultStage 108 (MapPartitionsRDD[477] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:31.034 INFO  MemoryStore - Block broadcast_194 stored as values in memory (estimated size 426.1 KiB, free 1918.1 MiB)
20:33:31.035 INFO  MemoryStore - Block broadcast_194_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.9 MiB)
20:33:31.035 INFO  BlockManagerInfo - Added broadcast_194_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:31.035 INFO  SparkContext - Created broadcast 194 from broadcast at DAGScheduler.scala:1580
20:33:31.036 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 108 (MapPartitionsRDD[477] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.036 INFO  TaskSchedulerImpl - Adding task set 108.0 with 2 tasks resource profile 0
20:33:31.036 INFO  TaskSetManager - Starting task 0.0 in stage 108.0 (TID 159) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7827 bytes) 
20:33:31.036 INFO  TaskSetManager - Starting task 1.0 in stage 108.0 (TID 160) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7827 bytes) 
20:33:31.037 INFO  Executor - Running task 0.0 in stage 108.0 (TID 159)
20:33:31.037 INFO  Executor - Running task 1.0 in stage 108.0 (TID 160)
20:33:31.083 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest315075463562183185386.bam/part-r-00001.bam:0+123314
20:33:31.083 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest315075463562183185386.bam/part-r-00000.bam:0+122169
20:33:31.087 INFO  Executor - Finished task 1.0 in stage 108.0 (TID 160). 989 bytes result sent to driver
20:33:31.087 INFO  Executor - Finished task 0.0 in stage 108.0 (TID 159). 989 bytes result sent to driver
20:33:31.087 INFO  TaskSetManager - Finished task 1.0 in stage 108.0 (TID 160) in 51 ms on localhost (executor driver) (1/2)
20:33:31.087 INFO  TaskSetManager - Finished task 0.0 in stage 108.0 (TID 159) in 51 ms on localhost (executor driver) (2/2)
20:33:31.087 INFO  TaskSchedulerImpl - Removed TaskSet 108.0, whose tasks have all completed, from pool 
20:33:31.088 INFO  DAGScheduler - ResultStage 108 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.072 s
20:33:31.088 INFO  DAGScheduler - Job 76 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.088 INFO  TaskSchedulerImpl - Killing all running tasks in stage 108: Stage finished
20:33:31.088 INFO  DAGScheduler - Job 76 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.072702 s
20:33:31.090 INFO  MemoryStore - Block broadcast_195 stored as values in memory (estimated size 576.0 B, free 1917.9 MiB)
20:33:31.091 INFO  MemoryStore - Block broadcast_195_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.9 MiB)
20:33:31.091 INFO  BlockManagerInfo - Added broadcast_195_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.5 MiB)
20:33:31.091 INFO  SparkContext - Created broadcast 195 from broadcast at CramSource.java:114
20:33:31.092 INFO  MemoryStore - Block broadcast_196 stored as values in memory (estimated size 297.9 KiB, free 1917.6 MiB)
20:33:31.098 INFO  MemoryStore - Block broadcast_196_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.6 MiB)
20:33:31.099 INFO  BlockManagerInfo - Added broadcast_196_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:31.099 INFO  SparkContext - Created broadcast 196 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.120 INFO  MemoryStore - Block broadcast_197 stored as values in memory (estimated size 576.0 B, free 1917.6 MiB)
20:33:31.120 INFO  MemoryStore - Block broadcast_197_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.6 MiB)
20:33:31.120 INFO  BlockManagerInfo - Added broadcast_197_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.5 MiB)
20:33:31.121 INFO  SparkContext - Created broadcast 197 from broadcast at CramSource.java:114
20:33:31.122 INFO  MemoryStore - Block broadcast_198 stored as values in memory (estimated size 297.9 KiB, free 1917.3 MiB)
20:33:31.132 INFO  MemoryStore - Block broadcast_198_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.3 MiB)
20:33:31.133 INFO  BlockManagerInfo - Added broadcast_198_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:31.133 INFO  SparkContext - Created broadcast 198 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.147 INFO  MemoryStore - Block broadcast_199 stored as values in memory (estimated size 6.0 KiB, free 1917.2 MiB)
20:33:31.147 INFO  MemoryStore - Block broadcast_199_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1917.2 MiB)
20:33:31.147 INFO  BlockManagerInfo - Added broadcast_199_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.4 MiB)
20:33:31.148 INFO  SparkContext - Created broadcast 199 from broadcast at ReadsSparkSink.java:133
20:33:31.148 INFO  MemoryStore - Block broadcast_200 stored as values in memory (estimated size 6.2 KiB, free 1917.2 MiB)
20:33:31.149 INFO  MemoryStore - Block broadcast_200_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1917.2 MiB)
20:33:31.149 INFO  BlockManagerInfo - Added broadcast_200_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.4 MiB)
20:33:31.149 INFO  SparkContext - Created broadcast 200 from broadcast at AnySamSinkMultiple.java:80
20:33:31.151 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.151 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.151 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.162 INFO  FileInputFormat - Total input files to process : 1
20:33:31.169 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:31.169 INFO  DAGScheduler - Registering RDD 484 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 23
20:33:31.169 INFO  DAGScheduler - Got job 77 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:31.169 INFO  DAGScheduler - Final stage: ResultStage 110 (runJob at SparkHadoopWriter.scala:83)
20:33:31.169 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 109)
20:33:31.170 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 109)
20:33:31.170 INFO  DAGScheduler - Submitting ShuffleMapStage 109 (MapPartitionsRDD[484] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:31.181 INFO  MemoryStore - Block broadcast_201 stored as values in memory (estimated size 288.4 KiB, free 1917.0 MiB)
20:33:31.182 INFO  MemoryStore - Block broadcast_201_piece0 stored as bytes in memory (estimated size 104.7 KiB, free 1916.9 MiB)
20:33:31.182 INFO  BlockManagerInfo - Added broadcast_201_piece0 in memory on localhost:45281 (size: 104.7 KiB, free: 1919.3 MiB)
20:33:31.183 INFO  SparkContext - Created broadcast 201 from broadcast at DAGScheduler.scala:1580
20:33:31.183 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 109 (MapPartitionsRDD[484] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:31.183 INFO  TaskSchedulerImpl - Adding task set 109.0 with 1 tasks resource profile 0
20:33:31.183 INFO  TaskSetManager - Starting task 0.0 in stage 109.0 (TID 161) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7869 bytes) 
20:33:31.184 INFO  Executor - Running task 0.0 in stage 109.0 (TID 161)
20:33:31.204 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:31.228 INFO  Executor - Finished task 0.0 in stage 109.0 (TID 161). 1235 bytes result sent to driver
20:33:31.229 INFO  BlockManagerInfo - Removed broadcast_186_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:31.229 INFO  TaskSetManager - Finished task 0.0 in stage 109.0 (TID 161) in 46 ms on localhost (executor driver) (1/1)
20:33:31.229 INFO  TaskSchedulerImpl - Removed TaskSet 109.0, whose tasks have all completed, from pool 
20:33:31.229 INFO  DAGScheduler - ShuffleMapStage 109 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.059 s
20:33:31.229 INFO  DAGScheduler - looking for newly runnable stages
20:33:31.229 INFO  DAGScheduler - running: HashSet()
20:33:31.229 INFO  DAGScheduler - waiting: HashSet(ResultStage 110)
20:33:31.229 INFO  DAGScheduler - failed: HashSet()
20:33:31.229 INFO  DAGScheduler - Submitting ResultStage 110 (MapPartitionsRDD[495] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:31.229 INFO  BlockManagerInfo - Removed broadcast_192_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:31.230 INFO  BlockManagerInfo - Removed broadcast_197_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.4 MiB)
20:33:31.230 INFO  BlockManagerInfo - Removed broadcast_190_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.6 MiB)
20:33:31.232 INFO  BlockManagerInfo - Removed broadcast_189_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.6 MiB)
20:33:31.232 INFO  BlockManagerInfo - Removed broadcast_198_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:31.233 INFO  BlockManagerInfo - Removed broadcast_193_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.6 MiB)
20:33:31.234 INFO  BlockManagerInfo - Removed broadcast_194_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:31.235 INFO  BlockManagerInfo - Removed broadcast_191_piece0 on localhost:45281 in memory (size: 56.2 KiB, free: 1919.8 MiB)
20:33:31.235 INFO  BlockManagerInfo - Removed broadcast_188_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.8 MiB)
20:33:31.238 INFO  MemoryStore - Block broadcast_202 stored as values in memory (estimated size 150.3 KiB, free 1919.1 MiB)
20:33:31.239 INFO  MemoryStore - Block broadcast_202_piece0 stored as bytes in memory (estimated size 56.4 KiB, free 1919.1 MiB)
20:33:31.239 INFO  BlockManagerInfo - Added broadcast_202_piece0 in memory on localhost:45281 (size: 56.4 KiB, free: 1919.8 MiB)
20:33:31.239 INFO  SparkContext - Created broadcast 202 from broadcast at DAGScheduler.scala:1580
20:33:31.239 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 110 (MapPartitionsRDD[495] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.239 INFO  TaskSchedulerImpl - Adding task set 110.0 with 2 tasks resource profile 0
20:33:31.240 INFO  TaskSetManager - Starting task 0.0 in stage 110.0 (TID 162) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:31.240 INFO  TaskSetManager - Starting task 1.0 in stage 110.0 (TID 163) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:31.241 INFO  Executor - Running task 0.0 in stage 110.0 (TID 162)
20:33:31.241 INFO  Executor - Running task 1.0 in stage 110.0 (TID 163)
20:33:31.245 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.245 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.245 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.245 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.245 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.245 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.246 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.246 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.247 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.247 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.247 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.247 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.255 INFO  ShuffleBlockFetcherIterator - Getting 1 (42.2 KiB) non-empty blocks including 1 (42.2 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.255 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.258 INFO  ShuffleBlockFetcherIterator - Getting 1 (42.2 KiB) non-empty blocks including 1 (42.2 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.258 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.259 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033317804918957480210870_0495_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest512749760155968719247.cram/_temporary/0/task_202507152033317804918957480210870_0495_r_000000
20:33:31.259 INFO  SparkHadoopMapRedUtil - attempt_202507152033317804918957480210870_0495_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:31.260 INFO  Executor - Finished task 0.0 in stage 110.0 (TID 162). 1729 bytes result sent to driver
20:33:31.260 INFO  TaskSetManager - Finished task 0.0 in stage 110.0 (TID 162) in 20 ms on localhost (executor driver) (1/2)
20:33:31.261 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033317804918957480210870_0495_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest512749760155968719247.cram/_temporary/0/task_202507152033317804918957480210870_0495_r_000001
20:33:31.261 INFO  SparkHadoopMapRedUtil - attempt_202507152033317804918957480210870_0495_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:31.261 INFO  Executor - Finished task 1.0 in stage 110.0 (TID 163). 1729 bytes result sent to driver
20:33:31.261 INFO  TaskSetManager - Finished task 1.0 in stage 110.0 (TID 163) in 21 ms on localhost (executor driver) (2/2)
20:33:31.261 INFO  TaskSchedulerImpl - Removed TaskSet 110.0, whose tasks have all completed, from pool 
20:33:31.262 INFO  DAGScheduler - ResultStage 110 (runJob at SparkHadoopWriter.scala:83) finished in 0.032 s
20:33:31.262 INFO  DAGScheduler - Job 77 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.262 INFO  TaskSchedulerImpl - Killing all running tasks in stage 110: Stage finished
20:33:31.262 INFO  DAGScheduler - Job 77 finished: runJob at SparkHadoopWriter.scala:83, took 0.092938 s
20:33:31.262 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033317804918957480210870_0495.
20:33:31.267 INFO  SparkHadoopWriter - Write Job job_202507152033317804918957480210870_0495 committed. Elapsed time: 5 ms.
20:33:31.269 INFO  MemoryStore - Block broadcast_203 stored as values in memory (estimated size 297.9 KiB, free 1918.8 MiB)
20:33:31.276 INFO  MemoryStore - Block broadcast_203_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.7 MiB)
20:33:31.276 INFO  BlockManagerInfo - Added broadcast_203_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:31.276 INFO  SparkContext - Created broadcast 203 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.299 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:31.299 INFO  DAGScheduler - Got job 78 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:31.299 INFO  DAGScheduler - Final stage: ResultStage 112 (count at ReadsSparkSinkUnitTest.java:222)
20:33:31.299 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 111)
20:33:31.299 INFO  DAGScheduler - Missing parents: List()
20:33:31.299 INFO  DAGScheduler - Submitting ResultStage 112 (MapPartitionsRDD[487] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:31.300 INFO  MemoryStore - Block broadcast_204 stored as values in memory (estimated size 6.3 KiB, free 1918.7 MiB)
20:33:31.301 INFO  MemoryStore - Block broadcast_204_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1918.7 MiB)
20:33:31.301 INFO  BlockManagerInfo - Added broadcast_204_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.7 MiB)
20:33:31.301 INFO  SparkContext - Created broadcast 204 from broadcast at DAGScheduler.scala:1580
20:33:31.301 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 112 (MapPartitionsRDD[487] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.301 INFO  TaskSchedulerImpl - Adding task set 112.0 with 2 tasks resource profile 0
20:33:31.302 INFO  TaskSetManager - Starting task 0.0 in stage 112.0 (TID 164) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:31.302 INFO  TaskSetManager - Starting task 1.0 in stage 112.0 (TID 165) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:31.302 INFO  Executor - Running task 0.0 in stage 112.0 (TID 164)
20:33:31.302 INFO  Executor - Running task 1.0 in stage 112.0 (TID 165)
20:33:31.304 INFO  ShuffleBlockFetcherIterator - Getting 1 (42.2 KiB) non-empty blocks including 1 (42.2 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.304 INFO  ShuffleBlockFetcherIterator - Getting 1 (42.2 KiB) non-empty blocks including 1 (42.2 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.304 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.304 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.307 INFO  Executor - Finished task 1.0 in stage 112.0 (TID 165). 1591 bytes result sent to driver
20:33:31.307 INFO  Executor - Finished task 0.0 in stage 112.0 (TID 164). 1591 bytes result sent to driver
20:33:31.308 INFO  TaskSetManager - Finished task 0.0 in stage 112.0 (TID 164) in 6 ms on localhost (executor driver) (1/2)
20:33:31.308 INFO  TaskSetManager - Finished task 1.0 in stage 112.0 (TID 165) in 6 ms on localhost (executor driver) (2/2)
20:33:31.308 INFO  TaskSchedulerImpl - Removed TaskSet 112.0, whose tasks have all completed, from pool 
20:33:31.308 INFO  DAGScheduler - ResultStage 112 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.008 s
20:33:31.308 INFO  DAGScheduler - Job 78 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.308 INFO  TaskSchedulerImpl - Killing all running tasks in stage 112: Stage finished
20:33:31.308 INFO  DAGScheduler - Job 78 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.009713 s
20:33:31.323 INFO  FileInputFormat - Total input files to process : 2
20:33:31.326 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:31.327 INFO  DAGScheduler - Got job 79 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:31.327 INFO  DAGScheduler - Final stage: ResultStage 113 (count at ReadsSparkSinkUnitTest.java:222)
20:33:31.327 INFO  DAGScheduler - Parents of final stage: List()
20:33:31.327 INFO  DAGScheduler - Missing parents: List()
20:33:31.327 INFO  DAGScheduler - Submitting ResultStage 113 (MapPartitionsRDD[502] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:31.345 INFO  MemoryStore - Block broadcast_205 stored as values in memory (estimated size 426.1 KiB, free 1918.3 MiB)
20:33:31.346 INFO  MemoryStore - Block broadcast_205_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.1 MiB)
20:33:31.346 INFO  BlockManagerInfo - Added broadcast_205_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:31.347 INFO  SparkContext - Created broadcast 205 from broadcast at DAGScheduler.scala:1580
20:33:31.347 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 113 (MapPartitionsRDD[502] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.347 INFO  TaskSchedulerImpl - Adding task set 113.0 with 2 tasks resource profile 0
20:33:31.348 INFO  TaskSetManager - Starting task 0.0 in stage 113.0 (TID 166) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7828 bytes) 
20:33:31.348 INFO  TaskSetManager - Starting task 1.0 in stage 113.0 (TID 167) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7828 bytes) 
20:33:31.348 INFO  Executor - Running task 1.0 in stage 113.0 (TID 167)
20:33:31.348 INFO  Executor - Running task 0.0 in stage 113.0 (TID 166)
20:33:31.380 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest512749760155968719247.cram/part-r-00001.bam:0+30825
20:33:31.383 INFO  Executor - Finished task 0.0 in stage 113.0 (TID 166). 989 bytes result sent to driver
20:33:31.384 INFO  TaskSetManager - Finished task 0.0 in stage 113.0 (TID 166) in 37 ms on localhost (executor driver) (1/2)
20:33:31.387 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest512749760155968719247.cram/part-r-00000.bam:0+31473
20:33:31.390 INFO  Executor - Finished task 1.0 in stage 113.0 (TID 167). 989 bytes result sent to driver
20:33:31.390 INFO  TaskSetManager - Finished task 1.0 in stage 113.0 (TID 167) in 42 ms on localhost (executor driver) (2/2)
20:33:31.390 INFO  TaskSchedulerImpl - Removed TaskSet 113.0, whose tasks have all completed, from pool 
20:33:31.391 INFO  DAGScheduler - ResultStage 113 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.063 s
20:33:31.391 INFO  DAGScheduler - Job 79 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.391 INFO  TaskSchedulerImpl - Killing all running tasks in stage 113: Stage finished
20:33:31.391 INFO  DAGScheduler - Job 79 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.064250 s
20:33:31.395 INFO  MemoryStore - Block broadcast_206 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:31.401 INFO  MemoryStore - Block broadcast_206_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.8 MiB)
20:33:31.401 INFO  BlockManagerInfo - Added broadcast_206_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:31.401 INFO  SparkContext - Created broadcast 206 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.425 INFO  MemoryStore - Block broadcast_207 stored as values in memory (estimated size 297.9 KiB, free 1917.5 MiB)
20:33:31.431 INFO  MemoryStore - Block broadcast_207_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.5 MiB)
20:33:31.432 INFO  BlockManagerInfo - Added broadcast_207_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:31.432 INFO  SparkContext - Created broadcast 207 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.452 INFO  MemoryStore - Block broadcast_208 stored as values in memory (estimated size 160.7 KiB, free 1917.3 MiB)
20:33:31.453 INFO  MemoryStore - Block broadcast_208_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:31.453 INFO  BlockManagerInfo - Added broadcast_208_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:31.453 INFO  SparkContext - Created broadcast 208 from broadcast at ReadsSparkSink.java:133
20:33:31.455 INFO  MemoryStore - Block broadcast_209 stored as values in memory (estimated size 163.2 KiB, free 1917.1 MiB)
20:33:31.455 INFO  MemoryStore - Block broadcast_209_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.1 MiB)
20:33:31.456 INFO  BlockManagerInfo - Added broadcast_209_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:31.456 INFO  SparkContext - Created broadcast 209 from broadcast at AnySamSinkMultiple.java:80
20:33:31.458 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.458 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.458 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.471 INFO  FileInputFormat - Total input files to process : 1
20:33:31.477 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:31.478 INFO  DAGScheduler - Registering RDD 510 (repartition at ReadsSparkSinkUnitTest.java:210) as input to shuffle 24
20:33:31.478 INFO  DAGScheduler - Got job 80 (runJob at SparkHadoopWriter.scala:83) with 2 output partitions
20:33:31.478 INFO  DAGScheduler - Final stage: ResultStage 115 (runJob at SparkHadoopWriter.scala:83)
20:33:31.478 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 114)
20:33:31.478 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 114)
20:33:31.478 INFO  DAGScheduler - Submitting ShuffleMapStage 114 (MapPartitionsRDD[510] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:31.496 INFO  MemoryStore - Block broadcast_210 stored as values in memory (estimated size 427.7 KiB, free 1916.7 MiB)
20:33:31.497 INFO  MemoryStore - Block broadcast_210_piece0 stored as bytes in memory (estimated size 154.6 KiB, free 1916.6 MiB)
20:33:31.497 INFO  BlockManagerInfo - Added broadcast_210_piece0 in memory on localhost:45281 (size: 154.6 KiB, free: 1919.3 MiB)
20:33:31.497 INFO  SparkContext - Created broadcast 210 from broadcast at DAGScheduler.scala:1580
20:33:31.498 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 114 (MapPartitionsRDD[510] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0))
20:33:31.498 INFO  TaskSchedulerImpl - Adding task set 114.0 with 1 tasks resource profile 0
20:33:31.498 INFO  TaskSetManager - Starting task 0.0 in stage 114.0 (TID 168) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:31.499 INFO  Executor - Running task 0.0 in stage 114.0 (TID 168)
20:33:31.531 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:31.547 INFO  Executor - Finished task 0.0 in stage 114.0 (TID 168). 1149 bytes result sent to driver
20:33:31.548 INFO  TaskSetManager - Finished task 0.0 in stage 114.0 (TID 168) in 50 ms on localhost (executor driver) (1/1)
20:33:31.548 INFO  TaskSchedulerImpl - Removed TaskSet 114.0, whose tasks have all completed, from pool 
20:33:31.548 INFO  DAGScheduler - ShuffleMapStage 114 (repartition at ReadsSparkSinkUnitTest.java:210) finished in 0.070 s
20:33:31.548 INFO  DAGScheduler - looking for newly runnable stages
20:33:31.548 INFO  DAGScheduler - running: HashSet()
20:33:31.548 INFO  DAGScheduler - waiting: HashSet(ResultStage 115)
20:33:31.548 INFO  DAGScheduler - failed: HashSet()
20:33:31.548 INFO  DAGScheduler - Submitting ResultStage 115 (MapPartitionsRDD[522] at mapToPair at AnySamSinkMultiple.java:89), which has no missing parents
20:33:31.559 INFO  MemoryStore - Block broadcast_211 stored as values in memory (estimated size 150.2 KiB, free 1916.4 MiB)
20:33:31.560 INFO  MemoryStore - Block broadcast_211_piece0 stored as bytes in memory (estimated size 56.3 KiB, free 1916.4 MiB)
20:33:31.560 INFO  BlockManagerInfo - Added broadcast_211_piece0 in memory on localhost:45281 (size: 56.3 KiB, free: 1919.3 MiB)
20:33:31.560 INFO  SparkContext - Created broadcast 211 from broadcast at DAGScheduler.scala:1580
20:33:31.560 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 115 (MapPartitionsRDD[522] at mapToPair at AnySamSinkMultiple.java:89) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.560 INFO  TaskSchedulerImpl - Adding task set 115.0 with 2 tasks resource profile 0
20:33:31.561 INFO  TaskSetManager - Starting task 0.0 in stage 115.0 (TID 169) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:31.561 INFO  TaskSetManager - Starting task 1.0 in stage 115.0 (TID 170) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:31.561 INFO  Executor - Running task 0.0 in stage 115.0 (TID 169)
20:33:31.562 INFO  Executor - Running task 1.0 in stage 115.0 (TID 170)
20:33:31.565 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.565 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.565 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.566 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.566 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.566 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.567 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.567 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.567 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.567 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.567 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.567 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.577 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.577 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.581 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.581 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.584 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033313619678543382305482_0522_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest616484712555443987791.sam/_temporary/0/task_202507152033313619678543382305482_0522_r_000000
20:33:31.584 INFO  SparkHadoopMapRedUtil - attempt_202507152033313619678543382305482_0522_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:31.584 INFO  Executor - Finished task 0.0 in stage 115.0 (TID 169). 1729 bytes result sent to driver
20:33:31.585 INFO  TaskSetManager - Finished task 0.0 in stage 115.0 (TID 169) in 24 ms on localhost (executor driver) (1/2)
20:33:31.586 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033313619678543382305482_0522_r_000001_0' to file:/tmp/ReadsSparkSinkUnitTest616484712555443987791.sam/_temporary/0/task_202507152033313619678543382305482_0522_r_000001
20:33:31.586 INFO  SparkHadoopMapRedUtil - attempt_202507152033313619678543382305482_0522_r_000001_0: Committed. Elapsed time: 0 ms.
20:33:31.586 INFO  Executor - Finished task 1.0 in stage 115.0 (TID 170). 1729 bytes result sent to driver
20:33:31.587 INFO  TaskSetManager - Finished task 1.0 in stage 115.0 (TID 170) in 26 ms on localhost (executor driver) (2/2)
20:33:31.587 INFO  TaskSchedulerImpl - Removed TaskSet 115.0, whose tasks have all completed, from pool 
20:33:31.587 INFO  DAGScheduler - ResultStage 115 (runJob at SparkHadoopWriter.scala:83) finished in 0.038 s
20:33:31.587 INFO  DAGScheduler - Job 80 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.587 INFO  TaskSchedulerImpl - Killing all running tasks in stage 115: Stage finished
20:33:31.587 INFO  DAGScheduler - Job 80 finished: runJob at SparkHadoopWriter.scala:83, took 0.109956 s
20:33:31.588 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033313619678543382305482_0522.
20:33:31.594 INFO  SparkHadoopWriter - Write Job job_202507152033313619678543382305482_0522 committed. Elapsed time: 5 ms.
20:33:31.596 INFO  MemoryStore - Block broadcast_212 stored as values in memory (estimated size 297.9 KiB, free 1916.1 MiB)
20:33:31.602 INFO  MemoryStore - Block broadcast_212_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.0 MiB)
20:33:31.603 INFO  BlockManagerInfo - Added broadcast_212_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:31.603 INFO  SparkContext - Created broadcast 212 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.626 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:31.627 INFO  DAGScheduler - Got job 81 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:31.627 INFO  DAGScheduler - Final stage: ResultStage 117 (count at ReadsSparkSinkUnitTest.java:222)
20:33:31.627 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 116)
20:33:31.627 INFO  DAGScheduler - Missing parents: List()
20:33:31.627 INFO  DAGScheduler - Submitting ResultStage 117 (MapPartitionsRDD[513] at repartition at ReadsSparkSinkUnitTest.java:210), which has no missing parents
20:33:31.628 INFO  MemoryStore - Block broadcast_213 stored as values in memory (estimated size 6.3 KiB, free 1916.0 MiB)
20:33:31.635 INFO  MemoryStore - Block broadcast_213_piece0 stored as bytes in memory (estimated size 3.4 KiB, free 1916.0 MiB)
20:33:31.635 INFO  BlockManagerInfo - Added broadcast_213_piece0 in memory on localhost:45281 (size: 3.4 KiB, free: 1919.2 MiB)
20:33:31.635 INFO  BlockManagerInfo - Removed broadcast_211_piece0 on localhost:45281 in memory (size: 56.3 KiB, free: 1919.3 MiB)
20:33:31.636 INFO  SparkContext - Created broadcast 213 from broadcast at DAGScheduler.scala:1580
20:33:31.636 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 117 (MapPartitionsRDD[513] at repartition at ReadsSparkSinkUnitTest.java:210) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.636 INFO  BlockManagerInfo - Removed broadcast_204_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.3 MiB)
20:33:31.636 INFO  TaskSchedulerImpl - Adding task set 117.0 with 2 tasks resource profile 0
20:33:31.636 INFO  BlockManagerInfo - Removed broadcast_208_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:31.637 INFO  TaskSetManager - Starting task 0.0 in stage 117.0 (TID 171) (localhost, executor driver, partition 0, NODE_LOCAL, 7860 bytes) 
20:33:31.637 INFO  TaskSetManager - Starting task 1.0 in stage 117.0 (TID 172) (localhost, executor driver, partition 1, NODE_LOCAL, 7860 bytes) 
20:33:31.637 INFO  Executor - Running task 0.0 in stage 117.0 (TID 171)
20:33:31.637 INFO  Executor - Running task 1.0 in stage 117.0 (TID 172)
20:33:31.638 INFO  BlockManagerInfo - Removed broadcast_200_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.3 MiB)
20:33:31.638 INFO  BlockManagerInfo - Removed broadcast_203_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:31.639 INFO  BlockManagerInfo - Removed broadcast_199_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.3 MiB)
20:33:31.639 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.639 INFO  ShuffleBlockFetcherIterator - Getting 1 (176.4 KiB) non-empty blocks including 1 (176.4 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.639 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.639 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.641 INFO  BlockManagerInfo - Removed broadcast_195_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.3 MiB)
20:33:31.643 INFO  BlockManagerInfo - Removed broadcast_202_piece0 on localhost:45281 in memory (size: 56.4 KiB, free: 1919.4 MiB)
20:33:31.643 INFO  Executor - Finished task 0.0 in stage 117.0 (TID 171). 1634 bytes result sent to driver
20:33:31.643 INFO  Executor - Finished task 1.0 in stage 117.0 (TID 172). 1591 bytes result sent to driver
20:33:31.644 INFO  TaskSetManager - Finished task 0.0 in stage 117.0 (TID 171) in 7 ms on localhost (executor driver) (1/2)
20:33:31.644 INFO  BlockManagerInfo - Removed broadcast_209_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:31.644 INFO  TaskSetManager - Finished task 1.0 in stage 117.0 (TID 172) in 7 ms on localhost (executor driver) (2/2)
20:33:31.644 INFO  TaskSchedulerImpl - Removed TaskSet 117.0, whose tasks have all completed, from pool 
20:33:31.644 INFO  DAGScheduler - ResultStage 117 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.017 s
20:33:31.644 INFO  DAGScheduler - Job 81 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.645 INFO  TaskSchedulerImpl - Killing all running tasks in stage 117: Stage finished
20:33:31.645 INFO  BlockManagerInfo - Removed broadcast_207_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:31.645 INFO  DAGScheduler - Job 81 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.019136 s
20:33:31.646 INFO  BlockManagerInfo - Removed broadcast_196_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:31.647 INFO  BlockManagerInfo - Removed broadcast_201_piece0 on localhost:45281 in memory (size: 104.7 KiB, free: 1919.6 MiB)
20:33:31.647 INFO  BlockManagerInfo - Removed broadcast_205_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:31.648 INFO  BlockManagerInfo - Removed broadcast_210_piece0 on localhost:45281 in memory (size: 154.6 KiB, free: 1919.9 MiB)
20:33:31.659 INFO  FileInputFormat - Total input files to process : 2
20:33:31.662 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:222
20:33:31.662 INFO  DAGScheduler - Got job 82 (count at ReadsSparkSinkUnitTest.java:222) with 2 output partitions
20:33:31.662 INFO  DAGScheduler - Final stage: ResultStage 118 (count at ReadsSparkSinkUnitTest.java:222)
20:33:31.662 INFO  DAGScheduler - Parents of final stage: List()
20:33:31.662 INFO  DAGScheduler - Missing parents: List()
20:33:31.663 INFO  DAGScheduler - Submitting ResultStage 118 (MapPartitionsRDD[529] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:31.680 INFO  MemoryStore - Block broadcast_214 stored as values in memory (estimated size 426.1 KiB, free 1918.9 MiB)
20:33:31.681 INFO  MemoryStore - Block broadcast_214_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.7 MiB)
20:33:31.681 INFO  BlockManagerInfo - Added broadcast_214_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:31.682 INFO  SparkContext - Created broadcast 214 from broadcast at DAGScheduler.scala:1580
20:33:31.682 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 118 (MapPartitionsRDD[529] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
20:33:31.682 INFO  TaskSchedulerImpl - Adding task set 118.0 with 2 tasks resource profile 0
20:33:31.682 INFO  TaskSetManager - Starting task 0.0 in stage 118.0 (TID 173) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7827 bytes) 
20:33:31.682 INFO  TaskSetManager - Starting task 1.0 in stage 118.0 (TID 174) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7827 bytes) 
20:33:31.683 INFO  Executor - Running task 1.0 in stage 118.0 (TID 174)
20:33:31.683 INFO  Executor - Running task 0.0 in stage 118.0 (TID 173)
20:33:31.713 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest616484712555443987791.sam/part-r-00000.bam:0+132492
20:33:31.715 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest616484712555443987791.sam/part-r-00001.bam:0+129330
20:33:31.724 INFO  Executor - Finished task 0.0 in stage 118.0 (TID 173). 989 bytes result sent to driver
20:33:31.724 INFO  Executor - Finished task 1.0 in stage 118.0 (TID 174). 989 bytes result sent to driver
20:33:31.724 INFO  TaskSetManager - Finished task 0.0 in stage 118.0 (TID 173) in 42 ms on localhost (executor driver) (1/2)
20:33:31.724 INFO  TaskSetManager - Finished task 1.0 in stage 118.0 (TID 174) in 42 ms on localhost (executor driver) (2/2)
20:33:31.724 INFO  TaskSchedulerImpl - Removed TaskSet 118.0, whose tasks have all completed, from pool 
20:33:31.724 INFO  DAGScheduler - ResultStage 118 (count at ReadsSparkSinkUnitTest.java:222) finished in 0.061 s
20:33:31.725 INFO  DAGScheduler - Job 82 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.725 INFO  TaskSchedulerImpl - Killing all running tasks in stage 118: Stage finished
20:33:31.725 INFO  DAGScheduler - Job 82 finished: count at ReadsSparkSinkUnitTest.java:222, took 0.062756 s
20:33:31.729 INFO  MemoryStore - Block broadcast_215 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:31.735 INFO  MemoryStore - Block broadcast_215_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:31.735 INFO  BlockManagerInfo - Added broadcast_215_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:31.735 INFO  SparkContext - Created broadcast 215 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.757 INFO  MemoryStore - Block broadcast_216 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:31.763 INFO  MemoryStore - Block broadcast_216_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.1 MiB)
20:33:31.764 INFO  BlockManagerInfo - Added broadcast_216_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:31.764 INFO  SparkContext - Created broadcast 216 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.784 INFO  FileInputFormat - Total input files to process : 1
20:33:31.785 INFO  MemoryStore - Block broadcast_217 stored as values in memory (estimated size 160.7 KiB, free 1917.9 MiB)
20:33:31.786 INFO  MemoryStore - Block broadcast_217_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.9 MiB)
20:33:31.786 INFO  BlockManagerInfo - Added broadcast_217_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:31.786 INFO  SparkContext - Created broadcast 217 from broadcast at ReadsSparkSink.java:133
20:33:31.788 INFO  MemoryStore - Block broadcast_218 stored as values in memory (estimated size 163.2 KiB, free 1917.7 MiB)
20:33:31.788 INFO  MemoryStore - Block broadcast_218_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:31.788 INFO  BlockManagerInfo - Added broadcast_218_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:31.789 INFO  SparkContext - Created broadcast 218 from broadcast at BamSink.java:76
20:33:31.790 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.790 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.790 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.808 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:31.809 INFO  DAGScheduler - Registering RDD 543 (mapToPair at SparkUtils.java:161) as input to shuffle 25
20:33:31.809 INFO  DAGScheduler - Got job 83 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:31.809 INFO  DAGScheduler - Final stage: ResultStage 120 (runJob at SparkHadoopWriter.scala:83)
20:33:31.809 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 119)
20:33:31.809 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 119)
20:33:31.809 INFO  DAGScheduler - Submitting ShuffleMapStage 119 (MapPartitionsRDD[543] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:31.827 INFO  MemoryStore - Block broadcast_219 stored as values in memory (estimated size 520.4 KiB, free 1917.2 MiB)
20:33:31.828 INFO  MemoryStore - Block broadcast_219_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.1 MiB)
20:33:31.828 INFO  BlockManagerInfo - Added broadcast_219_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.5 MiB)
20:33:31.828 INFO  SparkContext - Created broadcast 219 from broadcast at DAGScheduler.scala:1580
20:33:31.829 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 119 (MapPartitionsRDD[543] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:31.829 INFO  TaskSchedulerImpl - Adding task set 119.0 with 1 tasks resource profile 0
20:33:31.829 INFO  TaskSetManager - Starting task 0.0 in stage 119.0 (TID 175) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:31.830 INFO  Executor - Running task 0.0 in stage 119.0 (TID 175)
20:33:31.860 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:31.875 INFO  Executor - Finished task 0.0 in stage 119.0 (TID 175). 1148 bytes result sent to driver
20:33:31.876 INFO  TaskSetManager - Finished task 0.0 in stage 119.0 (TID 175) in 47 ms on localhost (executor driver) (1/1)
20:33:31.876 INFO  TaskSchedulerImpl - Removed TaskSet 119.0, whose tasks have all completed, from pool 
20:33:31.876 INFO  DAGScheduler - ShuffleMapStage 119 (mapToPair at SparkUtils.java:161) finished in 0.067 s
20:33:31.876 INFO  DAGScheduler - looking for newly runnable stages
20:33:31.876 INFO  DAGScheduler - running: HashSet()
20:33:31.876 INFO  DAGScheduler - waiting: HashSet(ResultStage 120)
20:33:31.876 INFO  DAGScheduler - failed: HashSet()
20:33:31.876 INFO  DAGScheduler - Submitting ResultStage 120 (MapPartitionsRDD[548] at mapToPair at BamSink.java:91), which has no missing parents
20:33:31.883 INFO  MemoryStore - Block broadcast_220 stored as values in memory (estimated size 241.4 KiB, free 1916.8 MiB)
20:33:31.884 INFO  MemoryStore - Block broadcast_220_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.8 MiB)
20:33:31.884 INFO  BlockManagerInfo - Added broadcast_220_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.4 MiB)
20:33:31.884 INFO  SparkContext - Created broadcast 220 from broadcast at DAGScheduler.scala:1580
20:33:31.885 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 120 (MapPartitionsRDD[548] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:31.885 INFO  TaskSchedulerImpl - Adding task set 120.0 with 1 tasks resource profile 0
20:33:31.885 INFO  TaskSetManager - Starting task 0.0 in stage 120.0 (TID 176) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:31.886 INFO  Executor - Running task 0.0 in stage 120.0 (TID 176)
20:33:31.890 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:31.890 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:31.902 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.902 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.902 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.903 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:31.903 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:31.903 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:31.930 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033317746529755544404950_0548_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest13308687119890102574.bam.parts/_temporary/0/task_202507152033317746529755544404950_0548_r_000000
20:33:31.930 INFO  SparkHadoopMapRedUtil - attempt_202507152033317746529755544404950_0548_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:31.930 INFO  Executor - Finished task 0.0 in stage 120.0 (TID 176). 1858 bytes result sent to driver
20:33:31.931 INFO  TaskSetManager - Finished task 0.0 in stage 120.0 (TID 176) in 46 ms on localhost (executor driver) (1/1)
20:33:31.931 INFO  TaskSchedulerImpl - Removed TaskSet 120.0, whose tasks have all completed, from pool 
20:33:31.931 INFO  DAGScheduler - ResultStage 120 (runJob at SparkHadoopWriter.scala:83) finished in 0.054 s
20:33:31.931 INFO  DAGScheduler - Job 83 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:31.931 INFO  TaskSchedulerImpl - Killing all running tasks in stage 120: Stage finished
20:33:31.931 INFO  DAGScheduler - Job 83 finished: runJob at SparkHadoopWriter.scala:83, took 0.123302 s
20:33:31.932 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033317746529755544404950_0548.
20:33:31.938 INFO  SparkHadoopWriter - Write Job job_202507152033317746529755544404950_0548 committed. Elapsed time: 6 ms.
20:33:31.950 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam
20:33:31.955 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam done
20:33:31.955 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam.parts/ to /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam.sbi
20:33:31.960 INFO  IndexFileMerger - Done merging .sbi files
20:33:31.961 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam.parts/ to /tmp/ReadsSparkSinkUnitTest13308687119890102574.bam.bai
20:33:31.966 INFO  IndexFileMerger - Done merging .bai files
20:33:31.969 INFO  MemoryStore - Block broadcast_221 stored as values in memory (estimated size 320.0 B, free 1916.8 MiB)
20:33:31.969 INFO  MemoryStore - Block broadcast_221_piece0 stored as bytes in memory (estimated size 233.0 B, free 1916.8 MiB)
20:33:31.969 INFO  BlockManagerInfo - Added broadcast_221_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.4 MiB)
20:33:31.970 INFO  SparkContext - Created broadcast 221 from broadcast at BamSource.java:104
20:33:31.971 INFO  MemoryStore - Block broadcast_222 stored as values in memory (estimated size 297.9 KiB, free 1916.5 MiB)
20:33:31.982 INFO  MemoryStore - Block broadcast_222_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.4 MiB)
20:33:31.982 INFO  BlockManagerInfo - Added broadcast_222_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:31.982 INFO  SparkContext - Created broadcast 222 from newAPIHadoopFile at PathSplitSource.java:96
20:33:31.996 INFO  FileInputFormat - Total input files to process : 1
20:33:32.016 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:32.016 INFO  DAGScheduler - Got job 84 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:32.016 INFO  DAGScheduler - Final stage: ResultStage 121 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:32.016 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.016 INFO  DAGScheduler - Missing parents: List()
20:33:32.016 INFO  DAGScheduler - Submitting ResultStage 121 (MapPartitionsRDD[554] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.027 INFO  MemoryStore - Block broadcast_223 stored as values in memory (estimated size 148.2 KiB, free 1916.3 MiB)
20:33:32.027 INFO  MemoryStore - Block broadcast_223_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1916.2 MiB)
20:33:32.028 INFO  BlockManagerInfo - Added broadcast_223_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.3 MiB)
20:33:32.028 INFO  SparkContext - Created broadcast 223 from broadcast at DAGScheduler.scala:1580
20:33:32.028 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 121 (MapPartitionsRDD[554] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.028 INFO  TaskSchedulerImpl - Adding task set 121.0 with 1 tasks resource profile 0
20:33:32.028 INFO  TaskSetManager - Starting task 0.0 in stage 121.0 (TID 177) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:32.029 INFO  Executor - Running task 0.0 in stage 121.0 (TID 177)
20:33:32.040 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest13308687119890102574.bam:0+237038
20:33:32.046 INFO  Executor - Finished task 0.0 in stage 121.0 (TID 177). 651526 bytes result sent to driver
20:33:32.047 INFO  TaskSetManager - Finished task 0.0 in stage 121.0 (TID 177) in 19 ms on localhost (executor driver) (1/1)
20:33:32.047 INFO  TaskSchedulerImpl - Removed TaskSet 121.0, whose tasks have all completed, from pool 
20:33:32.047 INFO  DAGScheduler - ResultStage 121 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.030 s
20:33:32.048 INFO  DAGScheduler - Job 84 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.048 INFO  TaskSchedulerImpl - Killing all running tasks in stage 121: Stage finished
20:33:32.048 INFO  DAGScheduler - Job 84 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.031922 s
20:33:32.063 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:32.064 INFO  DAGScheduler - Got job 85 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:32.064 INFO  DAGScheduler - Final stage: ResultStage 122 (count at ReadsSparkSinkUnitTest.java:185)
20:33:32.064 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.064 INFO  DAGScheduler - Missing parents: List()
20:33:32.064 INFO  DAGScheduler - Submitting ResultStage 122 (MapPartitionsRDD[536] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.084 INFO  MemoryStore - Block broadcast_224 stored as values in memory (estimated size 426.1 KiB, free 1915.8 MiB)
20:33:32.085 INFO  MemoryStore - Block broadcast_224_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.7 MiB)
20:33:32.086 INFO  BlockManagerInfo - Added broadcast_224_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:32.086 INFO  SparkContext - Created broadcast 224 from broadcast at DAGScheduler.scala:1580
20:33:32.086 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 122 (MapPartitionsRDD[536] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.086 INFO  TaskSchedulerImpl - Adding task set 122.0 with 1 tasks resource profile 0
20:33:32.086 INFO  TaskSetManager - Starting task 0.0 in stage 122.0 (TID 178) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:32.087 INFO  Executor - Running task 0.0 in stage 122.0 (TID 178)
20:33:32.117 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:32.133 INFO  Executor - Finished task 0.0 in stage 122.0 (TID 178). 1075 bytes result sent to driver
20:33:32.133 INFO  BlockManagerInfo - Removed broadcast_223_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.2 MiB)
20:33:32.133 INFO  TaskSetManager - Finished task 0.0 in stage 122.0 (TID 178) in 47 ms on localhost (executor driver) (1/1)
20:33:32.133 INFO  TaskSchedulerImpl - Removed TaskSet 122.0, whose tasks have all completed, from pool 
20:33:32.133 INFO  DAGScheduler - ResultStage 122 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.069 s
20:33:32.133 INFO  DAGScheduler - Job 85 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.133 INFO  TaskSchedulerImpl - Killing all running tasks in stage 122: Stage finished
20:33:32.134 INFO  DAGScheduler - Job 85 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.070093 s
20:33:32.134 INFO  BlockManagerInfo - Removed broadcast_217_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.2 MiB)
20:33:32.135 INFO  BlockManagerInfo - Removed broadcast_216_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:32.136 INFO  BlockManagerInfo - Removed broadcast_212_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:32.136 INFO  BlockManagerInfo - Removed broadcast_220_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.4 MiB)
20:33:32.137 INFO  BlockManagerInfo - Removed broadcast_219_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:32.137 INFO  BlockManagerInfo - Removed broadcast_213_piece0 on localhost:45281 in memory (size: 3.4 KiB, free: 1919.5 MiB)
20:33:32.138 INFO  BlockManagerInfo - Removed broadcast_206_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:32.138 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:32.138 INFO  DAGScheduler - Got job 86 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:32.138 INFO  DAGScheduler - Final stage: ResultStage 123 (count at ReadsSparkSinkUnitTest.java:185)
20:33:32.138 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.138 INFO  DAGScheduler - Missing parents: List()
20:33:32.139 INFO  DAGScheduler - Submitting ResultStage 123 (MapPartitionsRDD[554] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.139 INFO  BlockManagerInfo - Removed broadcast_218_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:32.139 INFO  BlockManagerInfo - Removed broadcast_214_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:32.145 INFO  MemoryStore - Block broadcast_225 stored as values in memory (estimated size 148.1 KiB, free 1918.6 MiB)
20:33:32.146 INFO  MemoryStore - Block broadcast_225_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.6 MiB)
20:33:32.146 INFO  BlockManagerInfo - Added broadcast_225_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.7 MiB)
20:33:32.146 INFO  SparkContext - Created broadcast 225 from broadcast at DAGScheduler.scala:1580
20:33:32.146 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 123 (MapPartitionsRDD[554] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.146 INFO  TaskSchedulerImpl - Adding task set 123.0 with 1 tasks resource profile 0
20:33:32.147 INFO  TaskSetManager - Starting task 0.0 in stage 123.0 (TID 179) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:32.147 INFO  Executor - Running task 0.0 in stage 123.0 (TID 179)
20:33:32.164 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest13308687119890102574.bam:0+237038
20:33:32.167 INFO  Executor - Finished task 0.0 in stage 123.0 (TID 179). 989 bytes result sent to driver
20:33:32.168 INFO  TaskSetManager - Finished task 0.0 in stage 123.0 (TID 179) in 21 ms on localhost (executor driver) (1/1)
20:33:32.168 INFO  TaskSchedulerImpl - Removed TaskSet 123.0, whose tasks have all completed, from pool 
20:33:32.168 INFO  DAGScheduler - ResultStage 123 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.029 s
20:33:32.168 INFO  DAGScheduler - Job 86 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.168 INFO  TaskSchedulerImpl - Killing all running tasks in stage 123: Stage finished
20:33:32.168 INFO  DAGScheduler - Job 86 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.029963 s
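The two consecutive jobs above that both report "count at ReadsSparkSinkUnitTest.java:185" (jobs 85 and 86) are the test counting the original reads RDD and the RDD read back from the file it just wrote, so the totals can be compared. A minimal sketch of that check, assuming TestNG and the Spark Java API; the RDD names are hypothetical stand-ins for the test's read RDDs:

    // Minimal sketch; "original" and "roundTripped" are hypothetical stand-ins.
    import org.apache.spark.api.java.JavaRDD;
    import org.testng.Assert;

    final class CountRoundTripSketch {
        static <T> void assertSameCount(JavaRDD<T> original, JavaRDD<T> roundTripped) {
            // Each count() launches one Spark job, which is why the log shows two
            // back-to-back "count" jobs per test case.
            Assert.assertEquals(roundTripped.count(), original.count());
        }
    }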
20:33:32.171 INFO  MemoryStore - Block broadcast_226 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:32.177 INFO  MemoryStore - Block broadcast_226_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:32.177 INFO  BlockManagerInfo - Added broadcast_226_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:32.177 INFO  SparkContext - Created broadcast 226 from newAPIHadoopFile at PathSplitSource.java:96
20:33:32.208 INFO  MemoryStore - Block broadcast_227 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:32.214 INFO  MemoryStore - Block broadcast_227_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:32.214 INFO  BlockManagerInfo - Added broadcast_227_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:32.214 INFO  SparkContext - Created broadcast 227 from newAPIHadoopFile at PathSplitSource.java:96
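Each "Created broadcast N from newAPIHadoopFile at PathSplitSource.java:96" line corresponds to building a Hadoop-backed RDD over a BAM; Spark broadcasts the Hadoop Configuration for every such RDD, which is what the ~50 KiB broadcast pieces above are. A hedged sketch of the generic API involved, using a plain text input format because the BAM-aware input format used by GATK is not reproduced here:

    // Illustrative only: TextInputFormat stands in for the BAM-specific format.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    final class NewApiHadoopFileSketch {
        static JavaPairRDD<LongWritable, Text> open(JavaSparkContext ctx, String path) {
            // Spark broadcasts the Configuration to executors, producing the
            // "Created broadcast ... from newAPIHadoopFile" lines seen above.
            return ctx.newAPIHadoopFile(path, TextInputFormat.class,
                    LongWritable.class, Text.class, new Configuration());
        }
    }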
20:33:32.234 INFO  FileInputFormat - Total input files to process : 1
20:33:32.236 INFO  MemoryStore - Block broadcast_228 stored as values in memory (estimated size 160.7 KiB, free 1917.7 MiB)
20:33:32.237 INFO  MemoryStore - Block broadcast_228_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:32.237 INFO  BlockManagerInfo - Added broadcast_228_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:32.237 INFO  SparkContext - Created broadcast 228 from broadcast at ReadsSparkSink.java:133
20:33:32.239 INFO  MemoryStore - Block broadcast_229 stored as values in memory (estimated size 163.2 KiB, free 1917.5 MiB)
20:33:32.239 INFO  MemoryStore - Block broadcast_229_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:32.239 INFO  BlockManagerInfo - Added broadcast_229_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:32.240 INFO  SparkContext - Created broadcast 229 from broadcast at BamSink.java:76
20:33:32.241 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.241 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.241 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:32.258 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:32.259 INFO  DAGScheduler - Registering RDD 568 (mapToPair at SparkUtils.java:161) as input to shuffle 26
20:33:32.259 INFO  DAGScheduler - Got job 87 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:32.259 INFO  DAGScheduler - Final stage: ResultStage 125 (runJob at SparkHadoopWriter.scala:83)
20:33:32.259 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 124)
20:33:32.259 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 124)
20:33:32.259 INFO  DAGScheduler - Submitting ShuffleMapStage 124 (MapPartitionsRDD[568] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:32.276 INFO  MemoryStore - Block broadcast_230 stored as values in memory (estimated size 520.4 KiB, free 1917.0 MiB)
20:33:32.278 INFO  MemoryStore - Block broadcast_230_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.9 MiB)
20:33:32.278 INFO  BlockManagerInfo - Added broadcast_230_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:32.278 INFO  SparkContext - Created broadcast 230 from broadcast at DAGScheduler.scala:1580
20:33:32.278 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 124 (MapPartitionsRDD[568] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:32.278 INFO  TaskSchedulerImpl - Adding task set 124.0 with 1 tasks resource profile 0
20:33:32.279 INFO  TaskSetManager - Starting task 0.0 in stage 124.0 (TID 180) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:32.279 INFO  Executor - Running task 0.0 in stage 124.0 (TID 180)
20:33:32.309 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:32.324 INFO  Executor - Finished task 0.0 in stage 124.0 (TID 180). 1148 bytes result sent to driver
20:33:32.325 INFO  TaskSetManager - Finished task 0.0 in stage 124.0 (TID 180) in 47 ms on localhost (executor driver) (1/1)
20:33:32.325 INFO  TaskSchedulerImpl - Removed TaskSet 124.0, whose tasks have all completed, from pool 
20:33:32.325 INFO  DAGScheduler - ShuffleMapStage 124 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:32.325 INFO  DAGScheduler - looking for newly runnable stages
20:33:32.325 INFO  DAGScheduler - running: HashSet()
20:33:32.325 INFO  DAGScheduler - waiting: HashSet(ResultStage 125)
20:33:32.325 INFO  DAGScheduler - failed: HashSet()
20:33:32.325 INFO  DAGScheduler - Submitting ResultStage 125 (MapPartitionsRDD[573] at mapToPair at BamSink.java:91), which has no missing parents
20:33:32.332 INFO  MemoryStore - Block broadcast_231 stored as values in memory (estimated size 241.4 KiB, free 1916.6 MiB)
20:33:32.333 INFO  MemoryStore - Block broadcast_231_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.6 MiB)
20:33:32.333 INFO  BlockManagerInfo - Added broadcast_231_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.4 MiB)
20:33:32.333 INFO  SparkContext - Created broadcast 231 from broadcast at DAGScheduler.scala:1580
20:33:32.334 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 125 (MapPartitionsRDD[573] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:32.334 INFO  TaskSchedulerImpl - Adding task set 125.0 with 1 tasks resource profile 0
20:33:32.334 INFO  TaskSetManager - Starting task 0.0 in stage 125.0 (TID 181) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:32.334 INFO  Executor - Running task 0.0 in stage 125.0 (TID 181)
20:33:32.339 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:32.339 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:32.350 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.350 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.350 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:32.351 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.351 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.351 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:32.375 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033328919158099043749577_0573_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest118135869094132375630.bam.parts/_temporary/0/task_202507152033328919158099043749577_0573_r_000000
20:33:32.375 INFO  SparkHadoopMapRedUtil - attempt_202507152033328919158099043749577_0573_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:32.376 INFO  Executor - Finished task 0.0 in stage 125.0 (TID 181). 1858 bytes result sent to driver
20:33:32.376 INFO  TaskSetManager - Finished task 0.0 in stage 125.0 (TID 181) in 42 ms on localhost (executor driver) (1/1)
20:33:32.376 INFO  TaskSchedulerImpl - Removed TaskSet 125.0, whose tasks have all completed, from pool 
20:33:32.377 INFO  DAGScheduler - ResultStage 125 (runJob at SparkHadoopWriter.scala:83) finished in 0.051 s
20:33:32.377 INFO  DAGScheduler - Job 87 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.377 INFO  TaskSchedulerImpl - Killing all running tasks in stage 125: Stage finished
20:33:32.377 INFO  DAGScheduler - Job 87 finished: runJob at SparkHadoopWriter.scala:83, took 0.118622 s
20:33:32.377 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033328919158099043749577_0573.
20:33:32.382 INFO  SparkHadoopWriter - Write Job job_202507152033328919158099043749577_0573 committed. Elapsed time: 5 ms.
20:33:32.395 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam
20:33:32.399 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam done
20:33:32.400 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam.parts/ to /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam.sbi
20:33:32.405 INFO  IndexFileMerger - Done merging .sbi files
20:33:32.405 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam.parts/ to /tmp/ReadsSparkSinkUnitTest118135869094132375630.bam.bai
20:33:32.410 INFO  IndexFileMerger - Done merging .bai files
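The write path above ends by concatenating the shard outputs under the .bam.parts/ directory into a single BAM and then merging the per-shard .sbi and .bai indexes. GATK does this through HadoopFileSystemWrapper and IndexFileMerger; the sketch below only illustrates the concatenation idea on plain local files and does not attempt the header/terminator handling the real merge needs:

    // Conceptual sketch only (java.nio on a local filesystem).
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;

    final class ConcatPartsSketch {
        static void concat(List<Path> parts, Path target) throws IOException {
            try (OutputStream out = Files.newOutputStream(target)) {
                for (Path part : parts) {
                    Files.copy(part, out); // append each shard in order
                }
            }
        }
    }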
20:33:32.412 INFO  MemoryStore - Block broadcast_232 stored as values in memory (estimated size 13.3 KiB, free 1916.6 MiB)
20:33:32.413 INFO  MemoryStore - Block broadcast_232_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1916.5 MiB)
20:33:32.413 INFO  BlockManagerInfo - Added broadcast_232_piece0 in memory on localhost:45281 (size: 8.3 KiB, free: 1919.3 MiB)
20:33:32.413 INFO  SparkContext - Created broadcast 232 from broadcast at BamSource.java:104
20:33:32.414 INFO  MemoryStore - Block broadcast_233 stored as values in memory (estimated size 297.9 KiB, free 1916.3 MiB)
20:33:32.420 INFO  MemoryStore - Block broadcast_233_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.2 MiB)
20:33:32.420 INFO  BlockManagerInfo - Added broadcast_233_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:32.420 INFO  SparkContext - Created broadcast 233 from newAPIHadoopFile at PathSplitSource.java:96
20:33:32.429 INFO  FileInputFormat - Total input files to process : 1
20:33:32.443 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:32.443 INFO  DAGScheduler - Got job 88 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:32.443 INFO  DAGScheduler - Final stage: ResultStage 126 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:32.443 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.443 INFO  DAGScheduler - Missing parents: List()
20:33:32.444 INFO  DAGScheduler - Submitting ResultStage 126 (MapPartitionsRDD[579] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.450 INFO  MemoryStore - Block broadcast_234 stored as values in memory (estimated size 148.2 KiB, free 1916.1 MiB)
20:33:32.450 INFO  MemoryStore - Block broadcast_234_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1916.0 MiB)
20:33:32.450 INFO  BlockManagerInfo - Added broadcast_234_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.2 MiB)
20:33:32.451 INFO  SparkContext - Created broadcast 234 from broadcast at DAGScheduler.scala:1580
20:33:32.451 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 126 (MapPartitionsRDD[579] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.451 INFO  TaskSchedulerImpl - Adding task set 126.0 with 1 tasks resource profile 0
20:33:32.451 INFO  TaskSetManager - Starting task 0.0 in stage 126.0 (TID 182) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:32.452 INFO  Executor - Running task 0.0 in stage 126.0 (TID 182)
20:33:32.463 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest118135869094132375630.bam:0+237038
20:33:32.467 INFO  Executor - Finished task 0.0 in stage 126.0 (TID 182). 651483 bytes result sent to driver
20:33:32.469 INFO  TaskSetManager - Finished task 0.0 in stage 126.0 (TID 182) in 18 ms on localhost (executor driver) (1/1)
20:33:32.469 INFO  TaskSchedulerImpl - Removed TaskSet 126.0, whose tasks have all completed, from pool 
20:33:32.469 INFO  DAGScheduler - ResultStage 126 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.025 s
20:33:32.469 INFO  DAGScheduler - Job 88 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.469 INFO  TaskSchedulerImpl - Killing all running tasks in stage 126: Stage finished
20:33:32.470 INFO  DAGScheduler - Job 88 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.026445 s
20:33:32.479 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:32.479 INFO  DAGScheduler - Got job 89 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:32.479 INFO  DAGScheduler - Final stage: ResultStage 127 (count at ReadsSparkSinkUnitTest.java:185)
20:33:32.479 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.480 INFO  DAGScheduler - Missing parents: List()
20:33:32.480 INFO  DAGScheduler - Submitting ResultStage 127 (MapPartitionsRDD[561] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.497 INFO  MemoryStore - Block broadcast_235 stored as values in memory (estimated size 426.1 KiB, free 1915.6 MiB)
20:33:32.499 INFO  MemoryStore - Block broadcast_235_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.4 MiB)
20:33:32.499 INFO  BlockManagerInfo - Added broadcast_235_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.1 MiB)
20:33:32.499 INFO  SparkContext - Created broadcast 235 from broadcast at DAGScheduler.scala:1580
20:33:32.499 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 127 (MapPartitionsRDD[561] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.499 INFO  TaskSchedulerImpl - Adding task set 127.0 with 1 tasks resource profile 0
20:33:32.500 INFO  TaskSetManager - Starting task 0.0 in stage 127.0 (TID 183) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:32.500 INFO  Executor - Running task 0.0 in stage 127.0 (TID 183)
20:33:32.532 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:32.541 INFO  Executor - Finished task 0.0 in stage 127.0 (TID 183). 989 bytes result sent to driver
20:33:32.542 INFO  TaskSetManager - Finished task 0.0 in stage 127.0 (TID 183) in 42 ms on localhost (executor driver) (1/1)
20:33:32.542 INFO  TaskSchedulerImpl - Removed TaskSet 127.0, whose tasks have all completed, from pool 
20:33:32.542 INFO  DAGScheduler - ResultStage 127 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.062 s
20:33:32.542 INFO  DAGScheduler - Job 89 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.542 INFO  TaskSchedulerImpl - Killing all running tasks in stage 127: Stage finished
20:33:32.542 INFO  DAGScheduler - Job 89 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.063123 s
20:33:32.545 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:32.546 INFO  DAGScheduler - Got job 90 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:32.546 INFO  DAGScheduler - Final stage: ResultStage 128 (count at ReadsSparkSinkUnitTest.java:185)
20:33:32.546 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.546 INFO  DAGScheduler - Missing parents: List()
20:33:32.546 INFO  DAGScheduler - Submitting ResultStage 128 (MapPartitionsRDD[579] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.552 INFO  MemoryStore - Block broadcast_236 stored as values in memory (estimated size 148.1 KiB, free 1915.3 MiB)
20:33:32.553 INFO  MemoryStore - Block broadcast_236_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1915.2 MiB)
20:33:32.553 INFO  BlockManagerInfo - Added broadcast_236_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.0 MiB)
20:33:32.553 INFO  SparkContext - Created broadcast 236 from broadcast at DAGScheduler.scala:1580
20:33:32.553 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 128 (MapPartitionsRDD[579] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.553 INFO  TaskSchedulerImpl - Adding task set 128.0 with 1 tasks resource profile 0
20:33:32.554 INFO  TaskSetManager - Starting task 0.0 in stage 128.0 (TID 184) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:32.554 INFO  Executor - Running task 0.0 in stage 128.0 (TID 184)
20:33:32.565 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest118135869094132375630.bam:0+237038
20:33:32.575 INFO  Executor - Finished task 0.0 in stage 128.0 (TID 184). 1075 bytes result sent to driver
20:33:32.575 INFO  BlockManagerInfo - Removed broadcast_229_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.0 MiB)
20:33:32.575 INFO  TaskSetManager - Finished task 0.0 in stage 128.0 (TID 184) in 21 ms on localhost (executor driver) (1/1)
20:33:32.575 INFO  TaskSchedulerImpl - Removed TaskSet 128.0, whose tasks have all completed, from pool 
20:33:32.575 INFO  DAGScheduler - ResultStage 128 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.029 s
20:33:32.575 INFO  DAGScheduler - Job 90 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.575 INFO  TaskSchedulerImpl - Killing all running tasks in stage 128: Stage finished
20:33:32.576 INFO  DAGScheduler - Job 90 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.030032 s
20:33:32.576 INFO  BlockManagerInfo - Removed broadcast_224_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.2 MiB)
20:33:32.576 INFO  BlockManagerInfo - Removed broadcast_221_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.2 MiB)
20:33:32.577 INFO  BlockManagerInfo - Removed broadcast_215_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:32.577 INFO  BlockManagerInfo - Removed broadcast_230_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.4 MiB)
20:33:32.578 INFO  BlockManagerInfo - Removed broadcast_228_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:32.579 INFO  BlockManagerInfo - Removed broadcast_231_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.5 MiB)
20:33:32.579 INFO  MemoryStore - Block broadcast_237 stored as values in memory (estimated size 297.9 KiB, free 1917.2 MiB)
20:33:32.580 INFO  BlockManagerInfo - Removed broadcast_227_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:32.580 INFO  BlockManagerInfo - Removed broadcast_225_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.6 MiB)
20:33:32.581 INFO  BlockManagerInfo - Removed broadcast_235_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:32.581 INFO  BlockManagerInfo - Removed broadcast_222_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:32.582 INFO  BlockManagerInfo - Removed broadcast_234_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.8 MiB)
20:33:32.587 INFO  MemoryStore - Block broadcast_237_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.8 MiB)
20:33:32.587 INFO  BlockManagerInfo - Added broadcast_237_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:32.587 INFO  SparkContext - Created broadcast 237 from newAPIHadoopFile at PathSplitSource.java:96
20:33:32.615 INFO  MemoryStore - Block broadcast_238 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:32.622 INFO  MemoryStore - Block broadcast_238_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:32.622 INFO  BlockManagerInfo - Added broadcast_238_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:32.623 INFO  SparkContext - Created broadcast 238 from newAPIHadoopFile at PathSplitSource.java:96
20:33:32.643 INFO  FileInputFormat - Total input files to process : 1
20:33:32.645 INFO  MemoryStore - Block broadcast_239 stored as values in memory (estimated size 160.7 KiB, free 1918.3 MiB)
20:33:32.646 INFO  MemoryStore - Block broadcast_239_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.3 MiB)
20:33:32.646 INFO  BlockManagerInfo - Added broadcast_239_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:32.646 INFO  SparkContext - Created broadcast 239 from broadcast at ReadsSparkSink.java:133
20:33:32.646 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:32.647 INFO  MemoryStore - Block broadcast_240 stored as values in memory (estimated size 163.2 KiB, free 1918.1 MiB)
20:33:32.648 INFO  MemoryStore - Block broadcast_240_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:32.648 INFO  BlockManagerInfo - Added broadcast_240_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:32.648 INFO  SparkContext - Created broadcast 240 from broadcast at BamSink.java:76
20:33:32.650 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.650 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.650 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
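The "File Output Committer Algorithm version is 1" and "skip cleanup _temporary folders ... false" lines report Hadoop's default FileOutputCommitter settings, which this run does not override. For reference only, these correspond to ordinary Hadoop Configuration keys; the sketch below shows how they would be changed, which is an assumption about usage and not something the test does:

    // Illustrative only: standard Hadoop committer configuration keys.
    import org.apache.hadoop.conf.Configuration;

    final class CommitterConfigSketch {
        static Configuration tweakedCommitter() {
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 2);
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup.skipped", true);
            return conf;
        }
    }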
20:33:32.667 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:32.668 INFO  DAGScheduler - Registering RDD 593 (mapToPair at SparkUtils.java:161) as input to shuffle 27
20:33:32.668 INFO  DAGScheduler - Got job 91 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:32.668 INFO  DAGScheduler - Final stage: ResultStage 130 (runJob at SparkHadoopWriter.scala:83)
20:33:32.668 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 129)
20:33:32.668 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 129)
20:33:32.668 INFO  DAGScheduler - Submitting ShuffleMapStage 129 (MapPartitionsRDD[593] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:32.686 INFO  MemoryStore - Block broadcast_241 stored as values in memory (estimated size 520.4 KiB, free 1917.6 MiB)
20:33:32.687 INFO  MemoryStore - Block broadcast_241_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.4 MiB)
20:33:32.687 INFO  BlockManagerInfo - Added broadcast_241_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.6 MiB)
20:33:32.687 INFO  SparkContext - Created broadcast 241 from broadcast at DAGScheduler.scala:1580
20:33:32.688 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 129 (MapPartitionsRDD[593] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:32.688 INFO  TaskSchedulerImpl - Adding task set 129.0 with 1 tasks resource profile 0
20:33:32.688 INFO  TaskSetManager - Starting task 0.0 in stage 129.0 (TID 185) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:32.689 INFO  Executor - Running task 0.0 in stage 129.0 (TID 185)
20:33:32.720 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:32.734 INFO  Executor - Finished task 0.0 in stage 129.0 (TID 185). 1148 bytes result sent to driver
20:33:32.735 INFO  TaskSetManager - Finished task 0.0 in stage 129.0 (TID 185) in 47 ms on localhost (executor driver) (1/1)
20:33:32.735 INFO  TaskSchedulerImpl - Removed TaskSet 129.0, whose tasks have all completed, from pool 
20:33:32.735 INFO  DAGScheduler - ShuffleMapStage 129 (mapToPair at SparkUtils.java:161) finished in 0.067 s
20:33:32.735 INFO  DAGScheduler - looking for newly runnable stages
20:33:32.735 INFO  DAGScheduler - running: HashSet()
20:33:32.735 INFO  DAGScheduler - waiting: HashSet(ResultStage 130)
20:33:32.735 INFO  DAGScheduler - failed: HashSet()
20:33:32.735 INFO  DAGScheduler - Submitting ResultStage 130 (MapPartitionsRDD[598] at mapToPair at BamSink.java:91), which has no missing parents
20:33:32.742 INFO  MemoryStore - Block broadcast_242 stored as values in memory (estimated size 241.4 KiB, free 1917.2 MiB)
20:33:32.743 INFO  MemoryStore - Block broadcast_242_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1917.1 MiB)
20:33:32.743 INFO  BlockManagerInfo - Added broadcast_242_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.5 MiB)
20:33:32.743 INFO  SparkContext - Created broadcast 242 from broadcast at DAGScheduler.scala:1580
20:33:32.744 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 130 (MapPartitionsRDD[598] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:32.744 INFO  TaskSchedulerImpl - Adding task set 130.0 with 1 tasks resource profile 0
20:33:32.744 INFO  TaskSetManager - Starting task 0.0 in stage 130.0 (TID 186) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:32.744 INFO  Executor - Running task 0.0 in stage 130.0 (TID 186)
20:33:32.748 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:32.749 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:32.760 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.760 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.760 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:32.760 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:32.760 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:32.760 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:32.780 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033325022211526911556788_0598_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest114281110706525560475.bam.parts/_temporary/0/task_202507152033325022211526911556788_0598_r_000000
20:33:32.780 INFO  SparkHadoopMapRedUtil - attempt_202507152033325022211526911556788_0598_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:32.780 INFO  Executor - Finished task 0.0 in stage 130.0 (TID 186). 1858 bytes result sent to driver
20:33:32.781 INFO  TaskSetManager - Finished task 0.0 in stage 130.0 (TID 186) in 37 ms on localhost (executor driver) (1/1)
20:33:32.781 INFO  TaskSchedulerImpl - Removed TaskSet 130.0, whose tasks have all completed, from pool 
20:33:32.781 INFO  DAGScheduler - ResultStage 130 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
20:33:32.781 INFO  DAGScheduler - Job 91 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.781 INFO  TaskSchedulerImpl - Killing all running tasks in stage 130: Stage finished
20:33:32.781 INFO  DAGScheduler - Job 91 finished: runJob at SparkHadoopWriter.scala:83, took 0.114197 s
20:33:32.782 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033325022211526911556788_0598.
20:33:32.787 INFO  SparkHadoopWriter - Write Job job_202507152033325022211526911556788_0598 committed. Elapsed time: 4 ms.
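Each write above runs as a two-stage job: the shuffle registered from "mapToPair at SparkUtils.java:161" keys the reads so the result stage ("mapToPair at BamSink.java:91") can write them in order. A hedged sketch of the general mapToPair-then-sort pattern; the key and partitioning GATK actually uses are not reproduced, and keyOf is a hypothetical function:

    // Sketch with a hypothetical Long key; GATK's header-aware ordering is more involved.
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.function.Function;
    import scala.Tuple2;

    final class SortBeforeWriteSketch {
        static <R> JavaPairRDD<Long, R> keyAndSort(JavaRDD<R> reads, Function<R, Long> keyOf) {
            return reads
                    .mapToPair(r -> new Tuple2<>(keyOf.call(r), r)) // shuffle map stage
                    .sortByKey();                                   // ordered partitions for the writer
        }
    }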
20:33:32.799 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest114281110706525560475.bam
20:33:32.803 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest114281110706525560475.bam done
20:33:32.803 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest114281110706525560475.bam.parts/ to /tmp/ReadsSparkSinkUnitTest114281110706525560475.bam.bai
20:33:32.808 INFO  IndexFileMerger - Done merging .bai files
20:33:32.811 INFO  MemoryStore - Block broadcast_243 stored as values in memory (estimated size 297.9 KiB, free 1916.8 MiB)
20:33:32.817 INFO  MemoryStore - Block broadcast_243_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.8 MiB)
20:33:32.817 INFO  BlockManagerInfo - Added broadcast_243_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:32.818 INFO  SparkContext - Created broadcast 243 from newAPIHadoopFile at PathSplitSource.java:96
20:33:32.837 INFO  FileInputFormat - Total input files to process : 1
20:33:32.872 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:32.873 INFO  DAGScheduler - Got job 92 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:32.873 INFO  DAGScheduler - Final stage: ResultStage 131 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:32.873 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.873 INFO  DAGScheduler - Missing parents: List()
20:33:32.873 INFO  DAGScheduler - Submitting ResultStage 131 (MapPartitionsRDD[605] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.890 INFO  MemoryStore - Block broadcast_244 stored as values in memory (estimated size 426.2 KiB, free 1916.4 MiB)
20:33:32.891 INFO  MemoryStore - Block broadcast_244_piece0 stored as bytes in memory (estimated size 153.7 KiB, free 1916.2 MiB)
20:33:32.891 INFO  BlockManagerInfo - Added broadcast_244_piece0 in memory on localhost:45281 (size: 153.7 KiB, free: 1919.3 MiB)
20:33:32.892 INFO  SparkContext - Created broadcast 244 from broadcast at DAGScheduler.scala:1580
20:33:32.892 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 131 (MapPartitionsRDD[605] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.892 INFO  TaskSchedulerImpl - Adding task set 131.0 with 1 tasks resource profile 0
20:33:32.892 INFO  TaskSetManager - Starting task 0.0 in stage 131.0 (TID 187) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:32.893 INFO  Executor - Running task 0.0 in stage 131.0 (TID 187)
20:33:32.923 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest114281110706525560475.bam:0+237038
20:33:32.937 INFO  Executor - Finished task 0.0 in stage 131.0 (TID 187). 651526 bytes result sent to driver
20:33:32.939 INFO  TaskSetManager - Finished task 0.0 in stage 131.0 (TID 187) in 47 ms on localhost (executor driver) (1/1)
20:33:32.939 INFO  TaskSchedulerImpl - Removed TaskSet 131.0, whose tasks have all completed, from pool 
20:33:32.940 INFO  DAGScheduler - ResultStage 131 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.066 s
20:33:32.940 INFO  DAGScheduler - Job 92 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:32.940 INFO  TaskSchedulerImpl - Killing all running tasks in stage 131: Stage finished
20:33:32.940 INFO  DAGScheduler - Job 92 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.067321 s
20:33:32.949 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:32.950 INFO  DAGScheduler - Got job 93 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:32.950 INFO  DAGScheduler - Final stage: ResultStage 132 (count at ReadsSparkSinkUnitTest.java:185)
20:33:32.950 INFO  DAGScheduler - Parents of final stage: List()
20:33:32.950 INFO  DAGScheduler - Missing parents: List()
20:33:32.950 INFO  DAGScheduler - Submitting ResultStage 132 (MapPartitionsRDD[586] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:32.967 INFO  MemoryStore - Block broadcast_245 stored as values in memory (estimated size 426.1 KiB, free 1915.8 MiB)
20:33:32.968 INFO  MemoryStore - Block broadcast_245_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.6 MiB)
20:33:32.968 INFO  BlockManagerInfo - Added broadcast_245_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.1 MiB)
20:33:32.969 INFO  SparkContext - Created broadcast 245 from broadcast at DAGScheduler.scala:1580
20:33:32.969 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 132 (MapPartitionsRDD[586] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:32.969 INFO  TaskSchedulerImpl - Adding task set 132.0 with 1 tasks resource profile 0
20:33:32.969 INFO  TaskSetManager - Starting task 0.0 in stage 132.0 (TID 188) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:32.970 INFO  Executor - Running task 0.0 in stage 132.0 (TID 188)
20:33:33.001 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:33.010 INFO  Executor - Finished task 0.0 in stage 132.0 (TID 188). 989 bytes result sent to driver
20:33:33.011 INFO  TaskSetManager - Finished task 0.0 in stage 132.0 (TID 188) in 42 ms on localhost (executor driver) (1/1)
20:33:33.011 INFO  TaskSchedulerImpl - Removed TaskSet 132.0, whose tasks have all completed, from pool 
20:33:33.011 INFO  DAGScheduler - ResultStage 132 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.061 s
20:33:33.011 INFO  DAGScheduler - Job 93 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.011 INFO  TaskSchedulerImpl - Killing all running tasks in stage 132: Stage finished
20:33:33.011 INFO  DAGScheduler - Job 93 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061947 s
20:33:33.016 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:33.016 INFO  DAGScheduler - Got job 94 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:33.016 INFO  DAGScheduler - Final stage: ResultStage 133 (count at ReadsSparkSinkUnitTest.java:185)
20:33:33.016 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.016 INFO  DAGScheduler - Missing parents: List()
20:33:33.016 INFO  DAGScheduler - Submitting ResultStage 133 (MapPartitionsRDD[605] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.033 INFO  MemoryStore - Block broadcast_246 stored as values in memory (estimated size 426.1 KiB, free 1915.2 MiB)
20:33:33.035 INFO  MemoryStore - Block broadcast_246_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1915.1 MiB)
20:33:33.035 INFO  BlockManagerInfo - Added broadcast_246_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.0 MiB)
20:33:33.035 INFO  SparkContext - Created broadcast 246 from broadcast at DAGScheduler.scala:1580
20:33:33.035 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 133 (MapPartitionsRDD[605] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.035 INFO  TaskSchedulerImpl - Adding task set 133.0 with 1 tasks resource profile 0
20:33:33.036 INFO  TaskSetManager - Starting task 0.0 in stage 133.0 (TID 189) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:33.036 INFO  Executor - Running task 0.0 in stage 133.0 (TID 189)
20:33:33.070 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest114281110706525560475.bam:0+237038
20:33:33.082 INFO  Executor - Finished task 0.0 in stage 133.0 (TID 189). 989 bytes result sent to driver
20:33:33.082 INFO  TaskSetManager - Finished task 0.0 in stage 133.0 (TID 189) in 46 ms on localhost (executor driver) (1/1)
20:33:33.082 INFO  TaskSchedulerImpl - Removed TaskSet 133.0, whose tasks have all completed, from pool 
20:33:33.082 INFO  DAGScheduler - ResultStage 133 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.065 s
20:33:33.082 INFO  DAGScheduler - Job 94 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.082 INFO  TaskSchedulerImpl - Killing all running tasks in stage 133: Stage finished
20:33:33.082 INFO  DAGScheduler - Job 94 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.066531 s
20:33:33.085 INFO  MemoryStore - Block broadcast_247 stored as values in memory (estimated size 297.9 KiB, free 1914.8 MiB)
20:33:33.091 INFO  MemoryStore - Block broadcast_247_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1914.7 MiB)
20:33:33.091 INFO  BlockManagerInfo - Added broadcast_247_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1918.9 MiB)
20:33:33.092 INFO  SparkContext - Created broadcast 247 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.099 INFO  BlockManagerInfo - Removed broadcast_236_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.0 MiB)
20:33:33.101 INFO  BlockManagerInfo - Removed broadcast_245_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.2 MiB)
20:33:33.101 INFO  BlockManagerInfo - Removed broadcast_238_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:33.103 INFO  BlockManagerInfo - Removed broadcast_242_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.3 MiB)
20:33:33.104 INFO  BlockManagerInfo - Removed broadcast_232_piece0 on localhost:45281 in memory (size: 8.3 KiB, free: 1919.3 MiB)
20:33:33.104 INFO  BlockManagerInfo - Removed broadcast_233_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:33.105 INFO  BlockManagerInfo - Removed broadcast_239_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:33.105 INFO  BlockManagerInfo - Removed broadcast_244_piece0 on localhost:45281 in memory (size: 153.7 KiB, free: 1919.5 MiB)
20:33:33.106 INFO  BlockManagerInfo - Removed broadcast_226_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:33.107 INFO  BlockManagerInfo - Removed broadcast_237_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:33.108 INFO  BlockManagerInfo - Removed broadcast_241_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.7 MiB)
20:33:33.108 INFO  BlockManagerInfo - Removed broadcast_240_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:33.109 INFO  BlockManagerInfo - Removed broadcast_243_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:33.109 INFO  BlockManagerInfo - Removed broadcast_246_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1920.0 MiB)
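The bursts of "Removed broadcast_N_piece0" lines are Spark's ContextCleaner reclaiming broadcast blocks the driver no longer references; the test does not remove them explicitly. For completeness, a broadcast can also be released eagerly, as in the illustrative sketch below:

    // Illustrative only; the run above relies on automatic cleanup.
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.broadcast.Broadcast;

    final class BroadcastCleanupSketch {
        static void useAndRelease(JavaSparkContext ctx) {
            Broadcast<int[]> data = ctx.broadcast(new int[] {1, 2, 3});
            System.out.println(data.value()[0]); // read the cached value
            data.unpersist(true);                // blocking removal of cached copies
            // data.destroy() would additionally forbid further use of the broadcast.
        }
    }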
20:33:33.132 INFO  MemoryStore - Block broadcast_248 stored as values in memory (estimated size 297.9 KiB, free 1919.4 MiB)
20:33:33.139 INFO  MemoryStore - Block broadcast_248_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:33.139 INFO  BlockManagerInfo - Added broadcast_248_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:33.139 INFO  SparkContext - Created broadcast 248 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.159 INFO  FileInputFormat - Total input files to process : 1
20:33:33.161 INFO  MemoryStore - Block broadcast_249 stored as values in memory (estimated size 160.7 KiB, free 1919.2 MiB)
20:33:33.162 INFO  MemoryStore - Block broadcast_249_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.2 MiB)
20:33:33.162 INFO  BlockManagerInfo - Added broadcast_249_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:33.162 INFO  SparkContext - Created broadcast 249 from broadcast at ReadsSparkSink.java:133
20:33:33.162 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:33.163 INFO  MemoryStore - Block broadcast_250 stored as values in memory (estimated size 163.2 KiB, free 1919.0 MiB)
20:33:33.164 INFO  MemoryStore - Block broadcast_250_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.0 MiB)
20:33:33.164 INFO  BlockManagerInfo - Added broadcast_250_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:33.164 INFO  SparkContext - Created broadcast 250 from broadcast at BamSink.java:76
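The broadcasts created "from broadcast at ReadsSparkSink.java:133" and "at BamSink.java:76" ship a per-write object (from the sizes, presumably the file header information) to executors once, instead of serializing it into every task. A generic sketch of that broadcast-once pattern; H is a stand-in type, not GATK's actual header class:

    // Generic sketch of sharing one object with all executors via a broadcast.
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.broadcast.Broadcast;

    final class HeaderBroadcastSketch {
        static <H> Broadcast<H> shipOnce(JavaSparkContext ctx, H header) {
            // Tasks later call value() on the returned handle instead of each
            // carrying its own serialized copy of the object.
            return ctx.broadcast(header);
        }
    }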
20:33:33.166 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.166 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.166 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:33.182 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:33.183 INFO  DAGScheduler - Registering RDD 619 (mapToPair at SparkUtils.java:161) as input to shuffle 28
20:33:33.183 INFO  DAGScheduler - Got job 95 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:33.183 INFO  DAGScheduler - Final stage: ResultStage 135 (runJob at SparkHadoopWriter.scala:83)
20:33:33.183 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 134)
20:33:33.183 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 134)
20:33:33.183 INFO  DAGScheduler - Submitting ShuffleMapStage 134 (MapPartitionsRDD[619] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:33.201 INFO  MemoryStore - Block broadcast_251 stored as values in memory (estimated size 520.4 KiB, free 1918.5 MiB)
20:33:33.202 INFO  MemoryStore - Block broadcast_251_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1918.3 MiB)
20:33:33.202 INFO  BlockManagerInfo - Added broadcast_251_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.7 MiB)
20:33:33.203 INFO  SparkContext - Created broadcast 251 from broadcast at DAGScheduler.scala:1580
20:33:33.203 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 134 (MapPartitionsRDD[619] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:33.203 INFO  TaskSchedulerImpl - Adding task set 134.0 with 1 tasks resource profile 0
20:33:33.203 INFO  TaskSetManager - Starting task 0.0 in stage 134.0 (TID 190) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:33.204 INFO  Executor - Running task 0.0 in stage 134.0 (TID 190)
20:33:33.234 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:33.249 INFO  Executor - Finished task 0.0 in stage 134.0 (TID 190). 1148 bytes result sent to driver
20:33:33.249 INFO  TaskSetManager - Finished task 0.0 in stage 134.0 (TID 190) in 46 ms on localhost (executor driver) (1/1)
20:33:33.249 INFO  TaskSchedulerImpl - Removed TaskSet 134.0, whose tasks have all completed, from pool 
20:33:33.250 INFO  DAGScheduler - ShuffleMapStage 134 (mapToPair at SparkUtils.java:161) finished in 0.067 s
20:33:33.250 INFO  DAGScheduler - looking for newly runnable stages
20:33:33.250 INFO  DAGScheduler - running: HashSet()
20:33:33.250 INFO  DAGScheduler - waiting: HashSet(ResultStage 135)
20:33:33.250 INFO  DAGScheduler - failed: HashSet()
20:33:33.250 INFO  DAGScheduler - Submitting ResultStage 135 (MapPartitionsRDD[624] at mapToPair at BamSink.java:91), which has no missing parents
20:33:33.257 INFO  MemoryStore - Block broadcast_252 stored as values in memory (estimated size 241.4 KiB, free 1918.1 MiB)
20:33:33.257 INFO  MemoryStore - Block broadcast_252_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1918.0 MiB)
20:33:33.258 INFO  BlockManagerInfo - Added broadcast_252_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.7 MiB)
20:33:33.258 INFO  SparkContext - Created broadcast 252 from broadcast at DAGScheduler.scala:1580
20:33:33.258 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 135 (MapPartitionsRDD[624] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:33.258 INFO  TaskSchedulerImpl - Adding task set 135.0 with 1 tasks resource profile 0
20:33:33.259 INFO  TaskSetManager - Starting task 0.0 in stage 135.0 (TID 191) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:33.259 INFO  Executor - Running task 0.0 in stage 135.0 (TID 191)
20:33:33.264 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:33.264 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:33.276 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.276 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.276 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:33.276 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.276 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.276 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:33.297 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033335910700706260748643_0624_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest113340149219245041903.bam.parts/_temporary/0/task_202507152033335910700706260748643_0624_r_000000
20:33:33.297 INFO  SparkHadoopMapRedUtil - attempt_202507152033335910700706260748643_0624_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:33.297 INFO  Executor - Finished task 0.0 in stage 135.0 (TID 191). 1858 bytes result sent to driver
20:33:33.298 INFO  TaskSetManager - Finished task 0.0 in stage 135.0 (TID 191) in 39 ms on localhost (executor driver) (1/1)
20:33:33.298 INFO  TaskSchedulerImpl - Removed TaskSet 135.0, whose tasks have all completed, from pool 
20:33:33.298 INFO  DAGScheduler - ResultStage 135 (runJob at SparkHadoopWriter.scala:83) finished in 0.048 s
20:33:33.298 INFO  DAGScheduler - Job 95 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.298 INFO  TaskSchedulerImpl - Killing all running tasks in stage 135: Stage finished
20:33:33.298 INFO  DAGScheduler - Job 95 finished: runJob at SparkHadoopWriter.scala:83, took 0.116027 s
20:33:33.299 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033335910700706260748643_0624.
20:33:33.303 INFO  SparkHadoopWriter - Write Job job_202507152033335910700706260748643_0624 committed. Elapsed time: 4 ms.
20:33:33.315 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest113340149219245041903.bam
20:33:33.319 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest113340149219245041903.bam done
20:33:33.319 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest113340149219245041903.bam.parts/ to /tmp/ReadsSparkSinkUnitTest113340149219245041903.bam.sbi
20:33:33.324 INFO  IndexFileMerger - Done merging .sbi files
20:33:33.326 INFO  MemoryStore - Block broadcast_253 stored as values in memory (estimated size 320.0 B, free 1918.0 MiB)
20:33:33.326 INFO  MemoryStore - Block broadcast_253_piece0 stored as bytes in memory (estimated size 233.0 B, free 1918.0 MiB)
20:33:33.326 INFO  BlockManagerInfo - Added broadcast_253_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.7 MiB)
20:33:33.327 INFO  SparkContext - Created broadcast 253 from broadcast at BamSource.java:104
20:33:33.328 INFO  MemoryStore - Block broadcast_254 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:33.335 INFO  MemoryStore - Block broadcast_254_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:33.335 INFO  BlockManagerInfo - Added broadcast_254_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:33.335 INFO  SparkContext - Created broadcast 254 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.344 INFO  FileInputFormat - Total input files to process : 1
20:33:33.358 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:33.358 INFO  DAGScheduler - Got job 96 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:33.358 INFO  DAGScheduler - Final stage: ResultStage 136 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:33.358 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.358 INFO  DAGScheduler - Missing parents: List()
20:33:33.358 INFO  DAGScheduler - Submitting ResultStage 136 (MapPartitionsRDD[630] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.364 INFO  MemoryStore - Block broadcast_255 stored as values in memory (estimated size 148.2 KiB, free 1917.5 MiB)
20:33:33.365 INFO  MemoryStore - Block broadcast_255_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.5 MiB)
20:33:33.365 INFO  BlockManagerInfo - Added broadcast_255_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:33.365 INFO  SparkContext - Created broadcast 255 from broadcast at DAGScheduler.scala:1580
20:33:33.366 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 136 (MapPartitionsRDD[630] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.366 INFO  TaskSchedulerImpl - Adding task set 136.0 with 1 tasks resource profile 0
20:33:33.366 INFO  TaskSetManager - Starting task 0.0 in stage 136.0 (TID 192) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:33.367 INFO  Executor - Running task 0.0 in stage 136.0 (TID 192)
20:33:33.378 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest113340149219245041903.bam:0+237038
20:33:33.382 INFO  Executor - Finished task 0.0 in stage 136.0 (TID 192). 651483 bytes result sent to driver
20:33:33.384 INFO  TaskSetManager - Finished task 0.0 in stage 136.0 (TID 192) in 18 ms on localhost (executor driver) (1/1)
20:33:33.384 INFO  TaskSchedulerImpl - Removed TaskSet 136.0, whose tasks have all completed, from pool 
20:33:33.384 INFO  DAGScheduler - ResultStage 136 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.026 s
20:33:33.384 INFO  DAGScheduler - Job 96 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.384 INFO  TaskSchedulerImpl - Killing all running tasks in stage 136: Stage finished
20:33:33.384 INFO  DAGScheduler - Job 96 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.026287 s
20:33:33.393 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:33.394 INFO  DAGScheduler - Got job 97 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:33.394 INFO  DAGScheduler - Final stage: ResultStage 137 (count at ReadsSparkSinkUnitTest.java:185)
20:33:33.394 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.394 INFO  DAGScheduler - Missing parents: List()
20:33:33.394 INFO  DAGScheduler - Submitting ResultStage 137 (MapPartitionsRDD[612] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.411 INFO  MemoryStore - Block broadcast_256 stored as values in memory (estimated size 426.1 KiB, free 1917.1 MiB)
20:33:33.413 INFO  MemoryStore - Block broadcast_256_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.9 MiB)
20:33:33.413 INFO  BlockManagerInfo - Added broadcast_256_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.4 MiB)
20:33:33.413 INFO  SparkContext - Created broadcast 256 from broadcast at DAGScheduler.scala:1580
20:33:33.413 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 137 (MapPartitionsRDD[612] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.413 INFO  TaskSchedulerImpl - Adding task set 137.0 with 1 tasks resource profile 0
20:33:33.414 INFO  TaskSetManager - Starting task 0.0 in stage 137.0 (TID 193) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:33.414 INFO  Executor - Running task 0.0 in stage 137.0 (TID 193)
20:33:33.446 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:33.455 INFO  Executor - Finished task 0.0 in stage 137.0 (TID 193). 989 bytes result sent to driver
20:33:33.455 INFO  TaskSetManager - Finished task 0.0 in stage 137.0 (TID 193) in 41 ms on localhost (executor driver) (1/1)
20:33:33.456 INFO  TaskSchedulerImpl - Removed TaskSet 137.0, whose tasks have all completed, from pool 
20:33:33.456 INFO  DAGScheduler - ResultStage 137 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.062 s
20:33:33.456 INFO  DAGScheduler - Job 97 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.456 INFO  TaskSchedulerImpl - Killing all running tasks in stage 137: Stage finished
20:33:33.456 INFO  DAGScheduler - Job 97 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.062488 s
20:33:33.459 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:33.459 INFO  DAGScheduler - Got job 98 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:33.459 INFO  DAGScheduler - Final stage: ResultStage 138 (count at ReadsSparkSinkUnitTest.java:185)
20:33:33.459 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.459 INFO  DAGScheduler - Missing parents: List()
20:33:33.460 INFO  DAGScheduler - Submitting ResultStage 138 (MapPartitionsRDD[630] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.466 INFO  MemoryStore - Block broadcast_257 stored as values in memory (estimated size 148.1 KiB, free 1916.8 MiB)
20:33:33.467 INFO  MemoryStore - Block broadcast_257_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1916.7 MiB)
20:33:33.467 INFO  BlockManagerInfo - Added broadcast_257_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.3 MiB)
20:33:33.467 INFO  SparkContext - Created broadcast 257 from broadcast at DAGScheduler.scala:1580
20:33:33.467 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 138 (MapPartitionsRDD[630] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.467 INFO  TaskSchedulerImpl - Adding task set 138.0 with 1 tasks resource profile 0
20:33:33.468 INFO  TaskSetManager - Starting task 0.0 in stage 138.0 (TID 194) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:33.468 INFO  Executor - Running task 0.0 in stage 138.0 (TID 194)
20:33:33.480 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest113340149219245041903.bam:0+237038
20:33:33.483 INFO  Executor - Finished task 0.0 in stage 138.0 (TID 194). 989 bytes result sent to driver
20:33:33.483 INFO  TaskSetManager - Finished task 0.0 in stage 138.0 (TID 194) in 15 ms on localhost (executor driver) (1/1)
20:33:33.483 INFO  TaskSchedulerImpl - Removed TaskSet 138.0, whose tasks have all completed, from pool 
20:33:33.484 INFO  DAGScheduler - ResultStage 138 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.024 s
20:33:33.484 INFO  DAGScheduler - Job 98 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.484 INFO  TaskSchedulerImpl - Killing all running tasks in stage 138: Stage finished
20:33:33.484 INFO  DAGScheduler - Job 98 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.024697 s
20:33:33.486 INFO  MemoryStore - Block broadcast_258 stored as values in memory (estimated size 297.9 KiB, free 1916.4 MiB)
20:33:33.492 INFO  MemoryStore - Block broadcast_258_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.4 MiB)
20:33:33.493 INFO  BlockManagerInfo - Added broadcast_258_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:33.493 INFO  SparkContext - Created broadcast 258 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.514 INFO  MemoryStore - Block broadcast_259 stored as values in memory (estimated size 297.9 KiB, free 1916.1 MiB)
20:33:33.520 INFO  MemoryStore - Block broadcast_259_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.0 MiB)
20:33:33.520 INFO  BlockManagerInfo - Added broadcast_259_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:33.520 INFO  SparkContext - Created broadcast 259 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.540 INFO  FileInputFormat - Total input files to process : 1
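
Note: the "Created broadcast ... from newAPIHadoopFile at PathSplitSource.java:96" and "FileInputFormat - Total input files to process" lines above come from building a Hadoop-backed RDD over the input file. The following is only a generic sketch of that pattern; it uses a plain TextInputFormat as a stand-in, not the BAM-aware input format the real code uses.

// Minimal sketch of building an RDD via newAPIHadoopFile. Each split reported by
// "NewHadoopRDD - Input split: ..." in the log becomes one RDD partition.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

final class NewApiHadoopFileSketch {
    static JavaPairRDD<LongWritable, Text> load(JavaSparkContext jsc, String path) {
        return jsc.newAPIHadoopFile(
                path,
                TextInputFormat.class,   // stand-in input format (real code is BAM-aware)
                LongWritable.class,      // key: byte offset within the split
                Text.class,              // value: one record
                new Configuration());
    }
}
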
20:33:33.541 INFO  MemoryStore - Block broadcast_260 stored as values in memory (estimated size 160.7 KiB, free 1915.9 MiB)
20:33:33.547 INFO  MemoryStore - Block broadcast_260_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.4 MiB)
20:33:33.547 INFO  BlockManagerInfo - Removed broadcast_256_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:33.547 INFO  BlockManagerInfo - Added broadcast_260_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:33.547 INFO  SparkContext - Created broadcast 260 from broadcast at ReadsSparkSink.java:133
20:33:33.547 INFO  BlockManagerInfo - Removed broadcast_250_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:33.548 INFO  BlockManagerInfo - Removed broadcast_259_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:33.548 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:33.548 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:33.548 INFO  BlockManagerInfo - Removed broadcast_248_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:33.549 INFO  MemoryStore - Block broadcast_261 stored as values in memory (estimated size 163.2 KiB, free 1917.1 MiB)
20:33:33.549 INFO  BlockManagerInfo - Removed broadcast_253_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.5 MiB)
20:33:33.549 INFO  BlockManagerInfo - Removed broadcast_254_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:33.550 INFO  MemoryStore - Block broadcast_261_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:33.550 INFO  BlockManagerInfo - Added broadcast_261_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:33.550 INFO  SparkContext - Created broadcast 261 from broadcast at BamSink.java:76
20:33:33.551 INFO  BlockManagerInfo - Removed broadcast_247_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:33.551 INFO  BlockManagerInfo - Removed broadcast_251_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.8 MiB)
20:33:33.552 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.552 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.552 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:33.552 INFO  BlockManagerInfo - Removed broadcast_252_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.8 MiB)
20:33:33.553 INFO  BlockManagerInfo - Removed broadcast_257_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.9 MiB)
20:33:33.553 INFO  BlockManagerInfo - Removed broadcast_249_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.9 MiB)
20:33:33.554 INFO  BlockManagerInfo - Removed broadcast_255_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.9 MiB)
20:33:33.570 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:33.570 INFO  DAGScheduler - Registering RDD 644 (mapToPair at SparkUtils.java:161) as input to shuffle 29
20:33:33.571 INFO  DAGScheduler - Got job 99 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:33.571 INFO  DAGScheduler - Final stage: ResultStage 140 (runJob at SparkHadoopWriter.scala:83)
20:33:33.571 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 139)
20:33:33.571 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 139)
20:33:33.571 INFO  DAGScheduler - Submitting ShuffleMapStage 139 (MapPartitionsRDD[644] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:33.589 INFO  MemoryStore - Block broadcast_262 stored as values in memory (estimated size 520.4 KiB, free 1918.8 MiB)
20:33:33.590 INFO  MemoryStore - Block broadcast_262_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1918.7 MiB)
20:33:33.590 INFO  BlockManagerInfo - Added broadcast_262_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.8 MiB)
20:33:33.590 INFO  SparkContext - Created broadcast 262 from broadcast at DAGScheduler.scala:1580
20:33:33.591 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 139 (MapPartitionsRDD[644] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:33.591 INFO  TaskSchedulerImpl - Adding task set 139.0 with 1 tasks resource profile 0
20:33:33.591 INFO  TaskSetManager - Starting task 0.0 in stage 139.0 (TID 195) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:33.592 INFO  Executor - Running task 0.0 in stage 139.0 (TID 195)
20:33:33.623 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:33.638 INFO  Executor - Finished task 0.0 in stage 139.0 (TID 195). 1148 bytes result sent to driver
20:33:33.638 INFO  TaskSetManager - Finished task 0.0 in stage 139.0 (TID 195) in 47 ms on localhost (executor driver) (1/1)
20:33:33.638 INFO  TaskSchedulerImpl - Removed TaskSet 139.0, whose tasks have all completed, from pool 
20:33:33.638 INFO  DAGScheduler - ShuffleMapStage 139 (mapToPair at SparkUtils.java:161) finished in 0.067 s
20:33:33.638 INFO  DAGScheduler - looking for newly runnable stages
20:33:33.638 INFO  DAGScheduler - running: HashSet()
20:33:33.639 INFO  DAGScheduler - waiting: HashSet(ResultStage 140)
20:33:33.639 INFO  DAGScheduler - failed: HashSet()
20:33:33.639 INFO  DAGScheduler - Submitting ResultStage 140 (MapPartitionsRDD[649] at mapToPair at BamSink.java:91), which has no missing parents
20:33:33.645 INFO  MemoryStore - Block broadcast_263 stored as values in memory (estimated size 241.4 KiB, free 1918.4 MiB)
20:33:33.646 INFO  MemoryStore - Block broadcast_263_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1918.4 MiB)
20:33:33.646 INFO  BlockManagerInfo - Added broadcast_263_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.7 MiB)
20:33:33.647 INFO  SparkContext - Created broadcast 263 from broadcast at DAGScheduler.scala:1580
20:33:33.647 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 140 (MapPartitionsRDD[649] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:33.647 INFO  TaskSchedulerImpl - Adding task set 140.0 with 1 tasks resource profile 0
20:33:33.647 INFO  TaskSetManager - Starting task 0.0 in stage 140.0 (TID 196) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:33.648 INFO  Executor - Running task 0.0 in stage 140.0 (TID 196)
20:33:33.652 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:33.652 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:33.664 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.664 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.664 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:33.664 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:33.664 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:33.664 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
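
Note: the repeated PathOutputCommitterFactory/FileOutputCommitter lines above show the job falling back to the default file output committer with algorithm version 1. The test appears to rely on the defaults; the sketch below only shows where such settings would live if one wanted to pin them, using standard Hadoop property names passed through Spark's "spark.hadoop." prefix (an illustration, not something the test does).

// Sketch only: pinning the output-committer behaviour referred to by the
// "File Output Committer Algorithm version is 1" lines above.
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

final class CommitterConfigSketch {
    static JavaSparkContext contextWithCommitterV1(String master, String appName) {
        SparkConf conf = new SparkConf()
                .setMaster(master)
                .setAppName(appName)
                // "spark.hadoop." entries are copied into the job's Hadoop Configuration.
                .set("spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version", "1")
                .set("spark.hadoop.mapreduce.fileoutputcommitter.cleanup.skipped", "false");
        return new JavaSparkContext(conf);
    }
}
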
20:33:33.679 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203333706588816948752493_0649_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1105006437971140925.bam.parts/_temporary/0/task_20250715203333706588816948752493_0649_r_000000
20:33:33.679 INFO  SparkHadoopMapRedUtil - attempt_20250715203333706588816948752493_0649_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:33.679 INFO  Executor - Finished task 0.0 in stage 140.0 (TID 196). 1858 bytes result sent to driver
20:33:33.680 INFO  TaskSetManager - Finished task 0.0 in stage 140.0 (TID 196) in 33 ms on localhost (executor driver) (1/1)
20:33:33.680 INFO  TaskSchedulerImpl - Removed TaskSet 140.0, whose tasks have all completed, from pool 
20:33:33.680 INFO  DAGScheduler - ResultStage 140 (runJob at SparkHadoopWriter.scala:83) finished in 0.041 s
20:33:33.680 INFO  DAGScheduler - Job 99 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.680 INFO  TaskSchedulerImpl - Killing all running tasks in stage 140: Stage finished
20:33:33.680 INFO  DAGScheduler - Job 99 finished: runJob at SparkHadoopWriter.scala:83, took 0.110176 s
20:33:33.680 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203333706588816948752493_0649.
20:33:33.685 INFO  SparkHadoopWriter - Write Job job_20250715203333706588816948752493_0649 committed. Elapsed time: 4 ms.
20:33:33.696 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest1105006437971140925.bam
20:33:33.701 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest1105006437971140925.bam done
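
Note: the "Concatenating 3 parts to ..." step above is performed by Disq's HadoopFileSystemWrapper and is BAM-aware (it handles header and BGZF terminator blocks). The sketch below is only a naive byte-level illustration of the shape of that merge over a Hadoop-compatible filesystem, not the real implementation.

// Rough sketch: merge shard outputs from a ".parts" directory into one target file.
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

final class ConcatPartsSketch {
    static void concatParts(Configuration conf, String partsDir, String target) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] parts = fs.listStatus(new Path(partsDir),
                p -> p.getName().startsWith("part-"));                     // shard outputs only
        Arrays.sort(parts, Comparator.comparing((FileStatus s) -> s.getPath().getName()));
        try (FSDataOutputStream out = fs.create(new Path(target), true)) {
            for (FileStatus part : parts) {
                try (FSDataInputStream in = fs.open(part.getPath())) {
                    IOUtils.copyBytes(in, out, 64 * 1024, false);          // keep 'out' open
                }
            }
        }
    }
}
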
20:33:33.703 INFO  MemoryStore - Block broadcast_264 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:33.710 INFO  MemoryStore - Block broadcast_264_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:33.710 INFO  BlockManagerInfo - Added broadcast_264_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:33.710 INFO  SparkContext - Created broadcast 264 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.730 INFO  FileInputFormat - Total input files to process : 1
20:33:33.765 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:33.765 INFO  DAGScheduler - Got job 100 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:33.765 INFO  DAGScheduler - Final stage: ResultStage 141 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:33.765 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.765 INFO  DAGScheduler - Missing parents: List()
20:33:33.765 INFO  DAGScheduler - Submitting ResultStage 141 (MapPartitionsRDD[656] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.782 INFO  MemoryStore - Block broadcast_265 stored as values in memory (estimated size 426.2 KiB, free 1917.6 MiB)
20:33:33.783 INFO  MemoryStore - Block broadcast_265_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.4 MiB)
20:33:33.784 INFO  BlockManagerInfo - Added broadcast_265_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:33.784 INFO  SparkContext - Created broadcast 265 from broadcast at DAGScheduler.scala:1580
20:33:33.784 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 141 (MapPartitionsRDD[656] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.784 INFO  TaskSchedulerImpl - Adding task set 141.0 with 1 tasks resource profile 0
20:33:33.785 INFO  TaskSetManager - Starting task 0.0 in stage 141.0 (TID 197) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7808 bytes) 
20:33:33.785 INFO  Executor - Running task 0.0 in stage 141.0 (TID 197)
20:33:33.816 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest1105006437971140925.bam:0+237038
20:33:33.828 INFO  Executor - Finished task 0.0 in stage 141.0 (TID 197). 651483 bytes result sent to driver
20:33:33.830 INFO  TaskSetManager - Finished task 0.0 in stage 141.0 (TID 197) in 45 ms on localhost (executor driver) (1/1)
20:33:33.830 INFO  TaskSchedulerImpl - Removed TaskSet 141.0, whose tasks have all completed, from pool 
20:33:33.830 INFO  DAGScheduler - ResultStage 141 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.064 s
20:33:33.830 INFO  DAGScheduler - Job 100 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.830 INFO  TaskSchedulerImpl - Killing all running tasks in stage 141: Stage finished
20:33:33.830 INFO  DAGScheduler - Job 100 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.065456 s
20:33:33.840 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:33.840 INFO  DAGScheduler - Got job 101 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:33.840 INFO  DAGScheduler - Final stage: ResultStage 142 (count at ReadsSparkSinkUnitTest.java:185)
20:33:33.840 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.840 INFO  DAGScheduler - Missing parents: List()
20:33:33.840 INFO  DAGScheduler - Submitting ResultStage 142 (MapPartitionsRDD[637] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.857 INFO  MemoryStore - Block broadcast_266 stored as values in memory (estimated size 426.1 KiB, free 1917.0 MiB)
20:33:33.859 INFO  MemoryStore - Block broadcast_266_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.9 MiB)
20:33:33.859 INFO  BlockManagerInfo - Added broadcast_266_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.4 MiB)
20:33:33.859 INFO  SparkContext - Created broadcast 266 from broadcast at DAGScheduler.scala:1580
20:33:33.859 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 142 (MapPartitionsRDD[637] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.859 INFO  TaskSchedulerImpl - Adding task set 142.0 with 1 tasks resource profile 0
20:33:33.860 INFO  TaskSetManager - Starting task 0.0 in stage 142.0 (TID 198) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:33.860 INFO  Executor - Running task 0.0 in stage 142.0 (TID 198)
20:33:33.891 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:33.900 INFO  Executor - Finished task 0.0 in stage 142.0 (TID 198). 989 bytes result sent to driver
20:33:33.901 INFO  TaskSetManager - Finished task 0.0 in stage 142.0 (TID 198) in 41 ms on localhost (executor driver) (1/1)
20:33:33.901 INFO  TaskSchedulerImpl - Removed TaskSet 142.0, whose tasks have all completed, from pool 
20:33:33.901 INFO  DAGScheduler - ResultStage 142 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.060 s
20:33:33.901 INFO  DAGScheduler - Job 101 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.901 INFO  TaskSchedulerImpl - Killing all running tasks in stage 142: Stage finished
20:33:33.901 INFO  DAGScheduler - Job 101 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061060 s
20:33:33.904 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:33.905 INFO  DAGScheduler - Got job 102 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:33.905 INFO  DAGScheduler - Final stage: ResultStage 143 (count at ReadsSparkSinkUnitTest.java:185)
20:33:33.905 INFO  DAGScheduler - Parents of final stage: List()
20:33:33.905 INFO  DAGScheduler - Missing parents: List()
20:33:33.905 INFO  DAGScheduler - Submitting ResultStage 143 (MapPartitionsRDD[656] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:33.922 INFO  MemoryStore - Block broadcast_267 stored as values in memory (estimated size 426.1 KiB, free 1916.5 MiB)
20:33:33.923 INFO  MemoryStore - Block broadcast_267_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.3 MiB)
20:33:33.923 INFO  BlockManagerInfo - Added broadcast_267_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:33.923 INFO  SparkContext - Created broadcast 267 from broadcast at DAGScheduler.scala:1580
20:33:33.923 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 143 (MapPartitionsRDD[656] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:33.923 INFO  TaskSchedulerImpl - Adding task set 143.0 with 1 tasks resource profile 0
20:33:33.924 INFO  TaskSetManager - Starting task 0.0 in stage 143.0 (TID 199) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7808 bytes) 
20:33:33.924 INFO  Executor - Running task 0.0 in stage 143.0 (TID 199)
20:33:33.955 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest1105006437971140925.bam:0+237038
20:33:33.966 INFO  Executor - Finished task 0.0 in stage 143.0 (TID 199). 989 bytes result sent to driver
20:33:33.966 INFO  TaskSetManager - Finished task 0.0 in stage 143.0 (TID 199) in 42 ms on localhost (executor driver) (1/1)
20:33:33.966 INFO  TaskSchedulerImpl - Removed TaskSet 143.0, whose tasks have all completed, from pool 
20:33:33.967 INFO  DAGScheduler - ResultStage 143 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.062 s
20:33:33.967 INFO  DAGScheduler - Job 102 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:33.967 INFO  TaskSchedulerImpl - Killing all running tasks in stage 143: Stage finished
20:33:33.967 INFO  DAGScheduler - Job 102 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.062483 s
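
Note: the jobs above (collect at ReadsSparkSinkUnitTest.java:182 followed by two counts at line 185) look like the test reading the merged output back and comparing it with the original input. The sketch below shows that generic write-then-read-back check with plain-text stand-ins; it is not the actual ReadsSparkSource/ReadsSparkSink API.

// Generic sketch of the round-trip check suggested by the paired
// "count at ReadsSparkSinkUnitTest.java:185" jobs in the log.
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

final class RoundTripCountSketch {
    static void verifyRoundTrip(JavaSparkContext jsc, String inputPath, String outputDir) {
        JavaRDD<String> original = jsc.textFile(inputPath);   // stand-in for the BAM reader
        original.saveAsTextFile(outputDir);                   // stand-in for the BAM writer
        JavaRDD<String> reread = jsc.textFile(outputDir);     // read the written output back

        long expected = original.count();                     // one Spark job per count,
        long actual = reread.count();                         // as seen in the log above
        if (expected != actual) {
            throw new AssertionError("record count changed: " + expected + " != " + actual);
        }
    }
}
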
20:33:33.970 INFO  MemoryStore - Block broadcast_268 stored as values in memory (estimated size 298.0 KiB, free 1916.0 MiB)
20:33:33.976 INFO  MemoryStore - Block broadcast_268_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1916.0 MiB)
20:33:33.976 INFO  BlockManagerInfo - Added broadcast_268_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.2 MiB)
20:33:33.976 INFO  SparkContext - Created broadcast 268 from newAPIHadoopFile at PathSplitSource.java:96
20:33:33.998 INFO  MemoryStore - Block broadcast_269 stored as values in memory (estimated size 298.0 KiB, free 1915.7 MiB)
20:33:34.005 INFO  MemoryStore - Block broadcast_269_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1915.6 MiB)
20:33:34.005 INFO  BlockManagerInfo - Added broadcast_269_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.1 MiB)
20:33:34.005 INFO  SparkContext - Created broadcast 269 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.025 INFO  FileInputFormat - Total input files to process : 1
20:33:34.028 INFO  MemoryStore - Block broadcast_270 stored as values in memory (estimated size 160.7 KiB, free 1915.5 MiB)
20:33:34.028 INFO  MemoryStore - Block broadcast_270_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.5 MiB)
20:33:34.028 INFO  BlockManagerInfo - Added broadcast_270_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:34.029 INFO  SparkContext - Created broadcast 270 from broadcast at ReadsSparkSink.java:133
20:33:34.030 INFO  MemoryStore - Block broadcast_271 stored as values in memory (estimated size 163.2 KiB, free 1915.3 MiB)
20:33:34.036 INFO  MemoryStore - Block broadcast_271_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.6 MiB)
20:33:34.036 INFO  BlockManagerInfo - Removed broadcast_258_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.1 MiB)
20:33:34.036 INFO  BlockManagerInfo - Added broadcast_271_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:34.036 INFO  SparkContext - Created broadcast 271 from broadcast at BamSink.java:76
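
Note: the "Created broadcast ... from broadcast at ReadsSparkSink.java:133" and "BamSink.java:76" lines indicate that some shared metadata (presumably the header) is broadcast to executors before writing. The sketch below shows only the generic driver-side broadcast pattern, with a plain String standing in for whatever the sink actually ships.

// Generic broadcast sketch: the value is sent to each executor once instead of
// being serialized into every task closure.
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;

final class HeaderBroadcastSketch {
    static JavaRDD<String> tagWithHeader(JavaSparkContext jsc, JavaRDD<String> records, String headerText) {
        Broadcast<String> header = jsc.broadcast(headerText);       // shipped once per executor
        return records.map(r -> header.value() + "\t" + r);         // executors read the broadcast value
    }
}
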
20:33:34.037 INFO  BlockManagerInfo - Removed broadcast_262_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.3 MiB)
20:33:34.037 INFO  BlockManagerInfo - Removed broadcast_264_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:34.038 INFO  BlockManagerInfo - Removed broadcast_269_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.4 MiB)
20:33:34.038 INFO  BlockManagerInfo - Removed broadcast_261_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:34.038 INFO  BlockManagerInfo - Removed broadcast_266_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.6 MiB)
20:33:34.039 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.039 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.039 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.039 INFO  BlockManagerInfo - Removed broadcast_263_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.6 MiB)
20:33:34.039 INFO  BlockManagerInfo - Removed broadcast_267_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:34.040 INFO  BlockManagerInfo - Removed broadcast_260_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:34.040 INFO  BlockManagerInfo - Removed broadcast_265_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.9 MiB)
20:33:34.056 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:34.056 INFO  DAGScheduler - Registering RDD 670 (mapToPair at SparkUtils.java:161) as input to shuffle 30
20:33:34.057 INFO  DAGScheduler - Got job 103 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:34.057 INFO  DAGScheduler - Final stage: ResultStage 145 (runJob at SparkHadoopWriter.scala:83)
20:33:34.057 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 144)
20:33:34.057 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 144)
20:33:34.057 INFO  DAGScheduler - Submitting ShuffleMapStage 144 (MapPartitionsRDD[670] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:34.074 INFO  MemoryStore - Block broadcast_272 stored as values in memory (estimated size 520.4 KiB, free 1918.8 MiB)
20:33:34.076 INFO  MemoryStore - Block broadcast_272_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1918.7 MiB)
20:33:34.076 INFO  BlockManagerInfo - Added broadcast_272_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.8 MiB)
20:33:34.076 INFO  SparkContext - Created broadcast 272 from broadcast at DAGScheduler.scala:1580
20:33:34.076 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 144 (MapPartitionsRDD[670] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:34.076 INFO  TaskSchedulerImpl - Adding task set 144.0 with 1 tasks resource profile 0
20:33:34.077 INFO  TaskSetManager - Starting task 0.0 in stage 144.0 (TID 200) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
20:33:34.077 INFO  Executor - Running task 0.0 in stage 144.0 (TID 200)
20:33:34.109 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:34.126 INFO  Executor - Finished task 0.0 in stage 144.0 (TID 200). 1148 bytes result sent to driver
20:33:34.127 INFO  TaskSetManager - Finished task 0.0 in stage 144.0 (TID 200) in 49 ms on localhost (executor driver) (1/1)
20:33:34.127 INFO  TaskSchedulerImpl - Removed TaskSet 144.0, whose tasks have all completed, from pool 
20:33:34.127 INFO  DAGScheduler - ShuffleMapStage 144 (mapToPair at SparkUtils.java:161) finished in 0.070 s
20:33:34.127 INFO  DAGScheduler - looking for newly runnable stages
20:33:34.127 INFO  DAGScheduler - running: HashSet()
20:33:34.127 INFO  DAGScheduler - waiting: HashSet(ResultStage 145)
20:33:34.127 INFO  DAGScheduler - failed: HashSet()
20:33:34.127 INFO  DAGScheduler - Submitting ResultStage 145 (MapPartitionsRDD[675] at mapToPair at BamSink.java:91), which has no missing parents
20:33:34.134 INFO  MemoryStore - Block broadcast_273 stored as values in memory (estimated size 241.4 KiB, free 1918.4 MiB)
20:33:34.135 INFO  MemoryStore - Block broadcast_273_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1918.4 MiB)
20:33:34.135 INFO  BlockManagerInfo - Added broadcast_273_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.7 MiB)
20:33:34.135 INFO  SparkContext - Created broadcast 273 from broadcast at DAGScheduler.scala:1580
20:33:34.135 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 145 (MapPartitionsRDD[675] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:34.135 INFO  TaskSchedulerImpl - Adding task set 145.0 with 1 tasks resource profile 0
20:33:34.136 INFO  TaskSetManager - Starting task 0.0 in stage 145.0 (TID 201) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:34.136 INFO  Executor - Running task 0.0 in stage 145.0 (TID 201)
20:33:34.140 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:34.141 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:34.157 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.157 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.157 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.158 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.158 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.158 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.182 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033344699770452007293954_0675_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest215847046718225444407.bam.parts/_temporary/0/task_202507152033344699770452007293954_0675_r_000000
20:33:34.182 INFO  SparkHadoopMapRedUtil - attempt_202507152033344699770452007293954_0675_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:34.183 INFO  Executor - Finished task 0.0 in stage 145.0 (TID 201). 1858 bytes result sent to driver
20:33:34.183 INFO  TaskSetManager - Finished task 0.0 in stage 145.0 (TID 201) in 47 ms on localhost (executor driver) (1/1)
20:33:34.183 INFO  TaskSchedulerImpl - Removed TaskSet 145.0, whose tasks have all completed, from pool 
20:33:34.183 INFO  DAGScheduler - ResultStage 145 (runJob at SparkHadoopWriter.scala:83) finished in 0.056 s
20:33:34.184 INFO  DAGScheduler - Job 103 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.184 INFO  TaskSchedulerImpl - Killing all running tasks in stage 145: Stage finished
20:33:34.184 INFO  DAGScheduler - Job 103 finished: runJob at SparkHadoopWriter.scala:83, took 0.127796 s
20:33:34.184 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033344699770452007293954_0675.
20:33:34.189 INFO  SparkHadoopWriter - Write Job job_202507152033344699770452007293954_0675 committed. Elapsed time: 4 ms.
20:33:34.201 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam
20:33:34.205 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam done
20:33:34.205 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam.parts/ to /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam.sbi
20:33:34.210 INFO  IndexFileMerger - Done merging .sbi files
20:33:34.210 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam.parts/ to /tmp/ReadsSparkSinkUnitTest215847046718225444407.bam.bai
20:33:34.216 INFO  IndexFileMerger - Done merging .bai files
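
Note: after the IndexFileMerger steps above, the merged .sbi and .bai files are expected to sit next to the output BAM. The trivial check below is only an illustration of that expectation, not the test's actual assertions.

// Illustrative check that the merged index files exist beside the BAM.
import java.nio.file.Files;
import java.nio.file.Path;

final class IndexFilesPresentSketch {
    static boolean indexesPresent(Path bam) {
        Path sbi = bam.resolveSibling(bam.getFileName() + ".sbi");
        Path bai = bam.resolveSibling(bam.getFileName() + ".bai");
        return Files.isRegularFile(sbi) && Files.isRegularFile(bai);
    }
}
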
20:33:34.219 INFO  MemoryStore - Block broadcast_274 stored as values in memory (estimated size 320.0 B, free 1918.4 MiB)
20:33:34.219 INFO  MemoryStore - Block broadcast_274_piece0 stored as bytes in memory (estimated size 233.0 B, free 1918.4 MiB)
20:33:34.219 INFO  BlockManagerInfo - Added broadcast_274_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.7 MiB)
20:33:34.219 INFO  SparkContext - Created broadcast 274 from broadcast at BamSource.java:104
20:33:34.220 INFO  MemoryStore - Block broadcast_275 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:34.227 INFO  MemoryStore - Block broadcast_275_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:34.227 INFO  BlockManagerInfo - Added broadcast_275_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:34.227 INFO  SparkContext - Created broadcast 275 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.236 INFO  FileInputFormat - Total input files to process : 1
20:33:34.250 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:34.250 INFO  DAGScheduler - Got job 104 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:34.250 INFO  DAGScheduler - Final stage: ResultStage 146 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:34.250 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.251 INFO  DAGScheduler - Missing parents: List()
20:33:34.251 INFO  DAGScheduler - Submitting ResultStage 146 (MapPartitionsRDD[681] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.257 INFO  MemoryStore - Block broadcast_276 stored as values in memory (estimated size 148.2 KiB, free 1917.9 MiB)
20:33:34.257 INFO  MemoryStore - Block broadcast_276_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1917.8 MiB)
20:33:34.258 INFO  BlockManagerInfo - Added broadcast_276_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:34.258 INFO  SparkContext - Created broadcast 276 from broadcast at DAGScheduler.scala:1580
20:33:34.258 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 146 (MapPartitionsRDD[681] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.258 INFO  TaskSchedulerImpl - Adding task set 146.0 with 1 tasks resource profile 0
20:33:34.259 INFO  TaskSetManager - Starting task 0.0 in stage 146.0 (TID 202) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:34.259 INFO  Executor - Running task 0.0 in stage 146.0 (TID 202)
20:33:34.271 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest215847046718225444407.bam:0+235514
20:33:34.276 INFO  Executor - Finished task 0.0 in stage 146.0 (TID 202). 650141 bytes result sent to driver
20:33:34.277 INFO  TaskSetManager - Finished task 0.0 in stage 146.0 (TID 202) in 19 ms on localhost (executor driver) (1/1)
20:33:34.277 INFO  TaskSchedulerImpl - Removed TaskSet 146.0, whose tasks have all completed, from pool 
20:33:34.278 INFO  DAGScheduler - ResultStage 146 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.026 s
20:33:34.278 INFO  DAGScheduler - Job 104 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.278 INFO  TaskSchedulerImpl - Killing all running tasks in stage 146: Stage finished
20:33:34.278 INFO  DAGScheduler - Job 104 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.027684 s
20:33:34.289 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:34.289 INFO  DAGScheduler - Got job 105 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:34.289 INFO  DAGScheduler - Final stage: ResultStage 147 (count at ReadsSparkSinkUnitTest.java:185)
20:33:34.289 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.289 INFO  DAGScheduler - Missing parents: List()
20:33:34.289 INFO  DAGScheduler - Submitting ResultStage 147 (MapPartitionsRDD[663] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.306 INFO  MemoryStore - Block broadcast_277 stored as values in memory (estimated size 426.1 KiB, free 1917.4 MiB)
20:33:34.308 INFO  MemoryStore - Block broadcast_277_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.2 MiB)
20:33:34.308 INFO  BlockManagerInfo - Added broadcast_277_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:34.308 INFO  SparkContext - Created broadcast 277 from broadcast at DAGScheduler.scala:1580
20:33:34.308 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 147 (MapPartitionsRDD[663] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.308 INFO  TaskSchedulerImpl - Adding task set 147.0 with 1 tasks resource profile 0
20:33:34.309 INFO  TaskSetManager - Starting task 0.0 in stage 147.0 (TID 203) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7912 bytes) 
20:33:34.309 INFO  Executor - Running task 0.0 in stage 147.0 (TID 203)
20:33:34.346 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:34.357 INFO  Executor - Finished task 0.0 in stage 147.0 (TID 203). 989 bytes result sent to driver
20:33:34.357 INFO  TaskSetManager - Finished task 0.0 in stage 147.0 (TID 203) in 48 ms on localhost (executor driver) (1/1)
20:33:34.357 INFO  TaskSchedulerImpl - Removed TaskSet 147.0, whose tasks have all completed, from pool 
20:33:34.358 INFO  DAGScheduler - ResultStage 147 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.068 s
20:33:34.358 INFO  DAGScheduler - Job 105 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.358 INFO  TaskSchedulerImpl - Killing all running tasks in stage 147: Stage finished
20:33:34.358 INFO  DAGScheduler - Job 105 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.068968 s
20:33:34.361 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:34.361 INFO  DAGScheduler - Got job 106 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:34.362 INFO  DAGScheduler - Final stage: ResultStage 148 (count at ReadsSparkSinkUnitTest.java:185)
20:33:34.362 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.362 INFO  DAGScheduler - Missing parents: List()
20:33:34.362 INFO  DAGScheduler - Submitting ResultStage 148 (MapPartitionsRDD[681] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.368 INFO  MemoryStore - Block broadcast_278 stored as values in memory (estimated size 148.1 KiB, free 1917.1 MiB)
20:33:34.369 INFO  MemoryStore - Block broadcast_278_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1917.1 MiB)
20:33:34.369 INFO  BlockManagerInfo - Added broadcast_278_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.4 MiB)
20:33:34.369 INFO  SparkContext - Created broadcast 278 from broadcast at DAGScheduler.scala:1580
20:33:34.369 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 148 (MapPartitionsRDD[681] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.369 INFO  TaskSchedulerImpl - Adding task set 148.0 with 1 tasks resource profile 0
20:33:34.370 INFO  TaskSetManager - Starting task 0.0 in stage 148.0 (TID 204) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:34.370 INFO  Executor - Running task 0.0 in stage 148.0 (TID 204)
20:33:34.381 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest215847046718225444407.bam:0+235514
20:33:34.385 INFO  Executor - Finished task 0.0 in stage 148.0 (TID 204). 989 bytes result sent to driver
20:33:34.385 INFO  TaskSetManager - Finished task 0.0 in stage 148.0 (TID 204) in 15 ms on localhost (executor driver) (1/1)
20:33:34.385 INFO  TaskSchedulerImpl - Removed TaskSet 148.0, whose tasks have all completed, from pool 
20:33:34.385 INFO  DAGScheduler - ResultStage 148 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.023 s
20:33:34.385 INFO  DAGScheduler - Job 106 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.386 INFO  TaskSchedulerImpl - Killing all running tasks in stage 148: Stage finished
20:33:34.386 INFO  DAGScheduler - Job 106 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.024366 s
20:33:34.388 INFO  MemoryStore - Block broadcast_279 stored as values in memory (estimated size 298.0 KiB, free 1916.8 MiB)
20:33:34.395 INFO  MemoryStore - Block broadcast_279_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.7 MiB)
20:33:34.395 INFO  BlockManagerInfo - Added broadcast_279_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:34.395 INFO  SparkContext - Created broadcast 279 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.417 INFO  MemoryStore - Block broadcast_280 stored as values in memory (estimated size 298.0 KiB, free 1916.4 MiB)
20:33:34.423 INFO  MemoryStore - Block broadcast_280_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.4 MiB)
20:33:34.423 INFO  BlockManagerInfo - Added broadcast_280_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:34.424 INFO  SparkContext - Created broadcast 280 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.443 INFO  FileInputFormat - Total input files to process : 1
20:33:34.445 INFO  MemoryStore - Block broadcast_281 stored as values in memory (estimated size 19.6 KiB, free 1916.4 MiB)
20:33:34.445 INFO  MemoryStore - Block broadcast_281_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.3 MiB)
20:33:34.445 INFO  BlockManagerInfo - Added broadcast_281_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.3 MiB)
20:33:34.446 INFO  SparkContext - Created broadcast 281 from broadcast at ReadsSparkSink.java:133
20:33:34.446 INFO  MemoryStore - Block broadcast_282 stored as values in memory (estimated size 20.0 KiB, free 1916.3 MiB)
20:33:34.451 INFO  MemoryStore - Block broadcast_282_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.3 MiB)
20:33:34.452 INFO  BlockManagerInfo - Added broadcast_282_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.3 MiB)
20:33:34.452 INFO  BlockManagerInfo - Removed broadcast_270_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:34.452 INFO  SparkContext - Created broadcast 282 from broadcast at BamSink.java:76
20:33:34.452 INFO  BlockManagerInfo - Removed broadcast_273_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.4 MiB)
20:33:34.454 INFO  BlockManagerInfo - Removed broadcast_271_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:34.454 INFO  BlockManagerInfo - Removed broadcast_278_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.4 MiB)
20:33:34.455 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.455 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.455 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.455 INFO  BlockManagerInfo - Removed broadcast_280_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:34.456 INFO  BlockManagerInfo - Removed broadcast_268_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.5 MiB)
20:33:34.456 INFO  BlockManagerInfo - Removed broadcast_275_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:34.457 INFO  BlockManagerInfo - Removed broadcast_277_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:34.457 INFO  BlockManagerInfo - Removed broadcast_276_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.8 MiB)
20:33:34.457 INFO  BlockManagerInfo - Removed broadcast_272_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.9 MiB)
20:33:34.458 INFO  BlockManagerInfo - Removed broadcast_274_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.9 MiB)
20:33:34.472 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:34.473 INFO  DAGScheduler - Registering RDD 695 (mapToPair at SparkUtils.java:161) as input to shuffle 31
20:33:34.473 INFO  DAGScheduler - Got job 107 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:34.473 INFO  DAGScheduler - Final stage: ResultStage 150 (runJob at SparkHadoopWriter.scala:83)
20:33:34.473 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 149)
20:33:34.473 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 149)
20:33:34.473 INFO  DAGScheduler - Submitting ShuffleMapStage 149 (MapPartitionsRDD[695] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:34.491 INFO  MemoryStore - Block broadcast_283 stored as values in memory (estimated size 434.3 KiB, free 1919.2 MiB)
20:33:34.493 INFO  MemoryStore - Block broadcast_283_piece0 stored as bytes in memory (estimated size 157.6 KiB, free 1919.0 MiB)
20:33:34.493 INFO  BlockManagerInfo - Added broadcast_283_piece0 in memory on localhost:45281 (size: 157.6 KiB, free: 1919.8 MiB)
20:33:34.493 INFO  SparkContext - Created broadcast 283 from broadcast at DAGScheduler.scala:1580
20:33:34.493 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 149 (MapPartitionsRDD[695] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:34.493 INFO  TaskSchedulerImpl - Adding task set 149.0 with 1 tasks resource profile 0
20:33:34.494 INFO  TaskSetManager - Starting task 0.0 in stage 149.0 (TID 205) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7882 bytes) 
20:33:34.494 INFO  Executor - Running task 0.0 in stage 149.0 (TID 205)
20:33:34.529 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:34.542 INFO  Executor - Finished task 0.0 in stage 149.0 (TID 205). 1148 bytes result sent to driver
20:33:34.542 INFO  TaskSetManager - Finished task 0.0 in stage 149.0 (TID 205) in 48 ms on localhost (executor driver) (1/1)
20:33:34.542 INFO  TaskSchedulerImpl - Removed TaskSet 149.0, whose tasks have all completed, from pool 
20:33:34.542 INFO  DAGScheduler - ShuffleMapStage 149 (mapToPair at SparkUtils.java:161) finished in 0.069 s
20:33:34.542 INFO  DAGScheduler - looking for newly runnable stages
20:33:34.542 INFO  DAGScheduler - running: HashSet()
20:33:34.542 INFO  DAGScheduler - waiting: HashSet(ResultStage 150)
20:33:34.542 INFO  DAGScheduler - failed: HashSet()
20:33:34.542 INFO  DAGScheduler - Submitting ResultStage 150 (MapPartitionsRDD[700] at mapToPair at BamSink.java:91), which has no missing parents
20:33:34.553 INFO  MemoryStore - Block broadcast_284 stored as values in memory (estimated size 155.3 KiB, free 1918.9 MiB)
20:33:34.554 INFO  MemoryStore - Block broadcast_284_piece0 stored as bytes in memory (estimated size 58.4 KiB, free 1918.8 MiB)
20:33:34.554 INFO  BlockManagerInfo - Added broadcast_284_piece0 in memory on localhost:45281 (size: 58.4 KiB, free: 1919.7 MiB)
20:33:34.554 INFO  SparkContext - Created broadcast 284 from broadcast at DAGScheduler.scala:1580
20:33:34.555 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 150 (MapPartitionsRDD[700] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:34.555 INFO  TaskSchedulerImpl - Adding task set 150.0 with 1 tasks resource profile 0
20:33:34.555 INFO  TaskSetManager - Starting task 0.0 in stage 150.0 (TID 206) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:34.555 INFO  Executor - Running task 0.0 in stage 150.0 (TID 206)
20:33:34.559 INFO  ShuffleBlockFetcherIterator - Getting 1 (312.6 KiB) non-empty blocks including 1 (312.6 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:34.559 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:34.570 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.570 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.570 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.570 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.570 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.570 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.592 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033345274146992506357176_0700_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest34879729942705394985.bam.parts/_temporary/0/task_202507152033345274146992506357176_0700_r_000000
20:33:34.592 INFO  SparkHadoopMapRedUtil - attempt_202507152033345274146992506357176_0700_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:34.593 INFO  Executor - Finished task 0.0 in stage 150.0 (TID 206). 1858 bytes result sent to driver
20:33:34.593 INFO  TaskSetManager - Finished task 0.0 in stage 150.0 (TID 206) in 38 ms on localhost (executor driver) (1/1)
20:33:34.593 INFO  TaskSchedulerImpl - Removed TaskSet 150.0, whose tasks have all completed, from pool 
20:33:34.593 INFO  DAGScheduler - ResultStage 150 (runJob at SparkHadoopWriter.scala:83) finished in 0.050 s
20:33:34.593 INFO  DAGScheduler - Job 107 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.593 INFO  TaskSchedulerImpl - Killing all running tasks in stage 150: Stage finished
20:33:34.593 INFO  DAGScheduler - Job 107 finished: runJob at SparkHadoopWriter.scala:83, took 0.121219 s
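
Note: write jobs such as Job 107 above split into a ShuffleMapStage ("mapToPair at SparkUtils.java:161") followed by a ResultStage because the records are keyed and shuffled (for sorting/partitioning) before the output stage. The sketch below reproduces that two-stage shape with a stand-in key function; it is not GATK's actual sort key or pipeline.

// Generic sketch of a mapToPair + sortByKey pipeline: the sort forces a shuffle
// boundary, yielding a ShuffleMapStage and then a ResultStage, as in the log.
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;

final class ShuffleShapeSketch {
    static JavaRDD<String> sortedForOutput(JavaRDD<String> records) {
        JavaPairRDD<String, String> keyed =
                records.mapToPair(r -> new Tuple2<>(r.split("\t", 2)[0], r)); // ShuffleMapStage side
        return keyed.sortByKey()                                              // shuffle boundary
                    .values();                                                // ResultStage side
    }
}
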
20:33:34.594 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033345274146992506357176_0700.
20:33:34.598 INFO  SparkHadoopWriter - Write Job job_202507152033345274146992506357176_0700 committed. Elapsed time: 4 ms.
20:33:34.609 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam
20:33:34.613 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam done
20:33:34.613 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam.parts/ to /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam.sbi
20:33:34.618 INFO  IndexFileMerger - Done merging .sbi files
20:33:34.618 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam.parts/ to /tmp/ReadsSparkSinkUnitTest34879729942705394985.bam.bai
20:33:34.622 INFO  IndexFileMerger - Done merging .bai files
20:33:34.624 INFO  MemoryStore - Block broadcast_285 stored as values in memory (estimated size 312.0 B, free 1918.8 MiB)
20:33:34.624 INFO  MemoryStore - Block broadcast_285_piece0 stored as bytes in memory (estimated size 231.0 B, free 1918.8 MiB)
20:33:34.624 INFO  BlockManagerInfo - Added broadcast_285_piece0 in memory on localhost:45281 (size: 231.0 B, free: 1919.7 MiB)
20:33:34.625 INFO  SparkContext - Created broadcast 285 from broadcast at BamSource.java:104
20:33:34.625 INFO  MemoryStore - Block broadcast_286 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:34.631 INFO  MemoryStore - Block broadcast_286_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.5 MiB)
20:33:34.631 INFO  BlockManagerInfo - Added broadcast_286_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:34.632 INFO  SparkContext - Created broadcast 286 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.640 INFO  FileInputFormat - Total input files to process : 1
20:33:34.654 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:34.655 INFO  DAGScheduler - Got job 108 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:34.655 INFO  DAGScheduler - Final stage: ResultStage 151 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:34.655 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.655 INFO  DAGScheduler - Missing parents: List()
20:33:34.655 INFO  DAGScheduler - Submitting ResultStage 151 (MapPartitionsRDD[706] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.663 INFO  MemoryStore - Block broadcast_287 stored as values in memory (estimated size 148.2 KiB, free 1918.3 MiB)
20:33:34.664 INFO  MemoryStore - Block broadcast_287_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.3 MiB)
20:33:34.664 INFO  BlockManagerInfo - Added broadcast_287_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:34.664 INFO  SparkContext - Created broadcast 287 from broadcast at DAGScheduler.scala:1580
20:33:34.665 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 151 (MapPartitionsRDD[706] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.665 INFO  TaskSchedulerImpl - Adding task set 151.0 with 1 tasks resource profile 0
20:33:34.665 INFO  TaskSetManager - Starting task 0.0 in stage 151.0 (TID 207) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:34.665 INFO  Executor - Running task 0.0 in stage 151.0 (TID 207)
20:33:34.677 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest34879729942705394985.bam:0+236517
20:33:34.681 INFO  Executor - Finished task 0.0 in stage 151.0 (TID 207). 749470 bytes result sent to driver
20:33:34.684 INFO  TaskSetManager - Finished task 0.0 in stage 151.0 (TID 207) in 19 ms on localhost (executor driver) (1/1)
20:33:34.684 INFO  TaskSchedulerImpl - Removed TaskSet 151.0, whose tasks have all completed, from pool 
20:33:34.684 INFO  DAGScheduler - ResultStage 151 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.029 s
20:33:34.684 INFO  DAGScheduler - Job 108 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.684 INFO  TaskSchedulerImpl - Killing all running tasks in stage 151: Stage finished
20:33:34.684 INFO  DAGScheduler - Job 108 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.029966 s
20:33:34.695 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:34.695 INFO  DAGScheduler - Got job 109 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:34.695 INFO  DAGScheduler - Final stage: ResultStage 152 (count at ReadsSparkSinkUnitTest.java:185)
20:33:34.695 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.695 INFO  DAGScheduler - Missing parents: List()
20:33:34.695 INFO  DAGScheduler - Submitting ResultStage 152 (MapPartitionsRDD[688] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.712 INFO  MemoryStore - Block broadcast_288 stored as values in memory (estimated size 426.1 KiB, free 1917.9 MiB)
20:33:34.713 INFO  MemoryStore - Block broadcast_288_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.7 MiB)
20:33:34.713 INFO  BlockManagerInfo - Added broadcast_288_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:34.714 INFO  SparkContext - Created broadcast 288 from broadcast at DAGScheduler.scala:1580
20:33:34.714 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 152 (MapPartitionsRDD[688] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.714 INFO  TaskSchedulerImpl - Adding task set 152.0 with 1 tasks resource profile 0
20:33:34.714 INFO  TaskSetManager - Starting task 0.0 in stage 152.0 (TID 208) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7893 bytes) 
20:33:34.715 INFO  Executor - Running task 0.0 in stage 152.0 (TID 208)
20:33:34.745 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:34.753 INFO  Executor - Finished task 0.0 in stage 152.0 (TID 208). 989 bytes result sent to driver
20:33:34.753 INFO  TaskSetManager - Finished task 0.0 in stage 152.0 (TID 208) in 39 ms on localhost (executor driver) (1/1)
20:33:34.753 INFO  TaskSchedulerImpl - Removed TaskSet 152.0, whose tasks have all completed, from pool 
20:33:34.753 INFO  DAGScheduler - ResultStage 152 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.057 s
20:33:34.753 INFO  DAGScheduler - Job 109 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.753 INFO  TaskSchedulerImpl - Killing all running tasks in stage 152: Stage finished
20:33:34.754 INFO  DAGScheduler - Job 109 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.058679 s
20:33:34.757 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:34.757 INFO  DAGScheduler - Got job 110 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:34.757 INFO  DAGScheduler - Final stage: ResultStage 153 (count at ReadsSparkSinkUnitTest.java:185)
20:33:34.757 INFO  DAGScheduler - Parents of final stage: List()
20:33:34.757 INFO  DAGScheduler - Missing parents: List()
20:33:34.757 INFO  DAGScheduler - Submitting ResultStage 153 (MapPartitionsRDD[706] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:34.763 INFO  MemoryStore - Block broadcast_289 stored as values in memory (estimated size 148.1 KiB, free 1917.6 MiB)
20:33:34.764 INFO  MemoryStore - Block broadcast_289_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1917.5 MiB)
20:33:34.764 INFO  BlockManagerInfo - Added broadcast_289_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.4 MiB)
20:33:34.765 INFO  SparkContext - Created broadcast 289 from broadcast at DAGScheduler.scala:1580
20:33:34.765 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 153 (MapPartitionsRDD[706] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:34.765 INFO  TaskSchedulerImpl - Adding task set 153.0 with 1 tasks resource profile 0
20:33:34.765 INFO  TaskSetManager - Starting task 0.0 in stage 153.0 (TID 209) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:34.766 INFO  Executor - Running task 0.0 in stage 153.0 (TID 209)
20:33:34.777 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest34879729942705394985.bam:0+236517
20:33:34.780 INFO  Executor - Finished task 0.0 in stage 153.0 (TID 209). 989 bytes result sent to driver
20:33:34.781 INFO  TaskSetManager - Finished task 0.0 in stage 153.0 (TID 209) in 16 ms on localhost (executor driver) (1/1)
20:33:34.781 INFO  TaskSchedulerImpl - Removed TaskSet 153.0, whose tasks have all completed, from pool 
20:33:34.781 INFO  DAGScheduler - ResultStage 153 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.023 s
20:33:34.781 INFO  DAGScheduler - Job 110 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.781 INFO  TaskSchedulerImpl - Killing all running tasks in stage 153: Stage finished
20:33:34.781 INFO  DAGScheduler - Job 110 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.024016 s
20:33:34.784 INFO  MemoryStore - Block broadcast_290 stored as values in memory (estimated size 576.0 B, free 1917.5 MiB)
20:33:34.784 INFO  MemoryStore - Block broadcast_290_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.5 MiB)
20:33:34.784 INFO  BlockManagerInfo - Added broadcast_290_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.4 MiB)
20:33:34.785 INFO  SparkContext - Created broadcast 290 from broadcast at CramSource.java:114
20:33:34.786 INFO  MemoryStore - Block broadcast_291 stored as values in memory (estimated size 297.9 KiB, free 1917.2 MiB)
20:33:34.792 INFO  MemoryStore - Block broadcast_291_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.2 MiB)
20:33:34.792 INFO  BlockManagerInfo - Added broadcast_291_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:34.792 INFO  SparkContext - Created broadcast 291 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.808 INFO  MemoryStore - Block broadcast_292 stored as values in memory (estimated size 576.0 B, free 1917.2 MiB)
20:33:34.808 INFO  MemoryStore - Block broadcast_292_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.2 MiB)
20:33:34.808 INFO  BlockManagerInfo - Added broadcast_292_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.4 MiB)
20:33:34.809 INFO  SparkContext - Created broadcast 292 from broadcast at CramSource.java:114
20:33:34.809 INFO  MemoryStore - Block broadcast_293 stored as values in memory (estimated size 297.9 KiB, free 1916.9 MiB)
20:33:34.816 INFO  BlockManagerInfo - Removed broadcast_282_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.4 MiB)
20:33:34.816 INFO  BlockManagerInfo - Removed broadcast_284_piece0 on localhost:45281 in memory (size: 58.4 KiB, free: 1919.4 MiB)
20:33:34.817 INFO  BlockManagerInfo - Removed broadcast_279_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:34.818 INFO  BlockManagerInfo - Removed broadcast_285_piece0 on localhost:45281 in memory (size: 231.0 B, free: 1919.5 MiB)
20:33:34.818 INFO  BlockManagerInfo - Removed broadcast_286_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:34.819 INFO  BlockManagerInfo - Removed broadcast_288_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:34.820 INFO  BlockManagerInfo - Removed broadcast_289_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.7 MiB)
20:33:34.820 INFO  BlockManagerInfo - Removed broadcast_283_piece0 on localhost:45281 in memory (size: 157.6 KiB, free: 1919.9 MiB)
20:33:34.820 INFO  BlockManagerInfo - Removed broadcast_281_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.9 MiB)
20:33:34.821 INFO  BlockManagerInfo - Removed broadcast_287_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1920.0 MiB)
20:33:34.822 INFO  MemoryStore - Block broadcast_293_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:34.822 INFO  BlockManagerInfo - Added broadcast_293_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:34.822 INFO  SparkContext - Created broadcast 293 from newAPIHadoopFile at PathSplitSource.java:96
20:33:34.837 INFO  FileInputFormat - Total input files to process : 1
20:33:34.838 INFO  MemoryStore - Block broadcast_294 stored as values in memory (estimated size 6.0 KiB, free 1919.3 MiB)
20:33:34.838 INFO  MemoryStore - Block broadcast_294_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:34.838 INFO  BlockManagerInfo - Added broadcast_294_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:34.839 INFO  SparkContext - Created broadcast 294 from broadcast at ReadsSparkSink.java:133
20:33:34.839 INFO  MemoryStore - Block broadcast_295 stored as values in memory (estimated size 6.2 KiB, free 1919.3 MiB)
20:33:34.839 INFO  MemoryStore - Block broadcast_295_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:34.840 INFO  BlockManagerInfo - Added broadcast_295_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:34.840 INFO  SparkContext - Created broadcast 295 from broadcast at CramSink.java:76
20:33:34.841 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.841 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.841 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.859 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:34.859 INFO  DAGScheduler - Registering RDD 718 (mapToPair at SparkUtils.java:161) as input to shuffle 32
20:33:34.859 INFO  DAGScheduler - Got job 111 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:34.859 INFO  DAGScheduler - Final stage: ResultStage 155 (runJob at SparkHadoopWriter.scala:83)
20:33:34.859 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 154)
20:33:34.859 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 154)
20:33:34.859 INFO  DAGScheduler - Submitting ShuffleMapStage 154 (MapPartitionsRDD[718] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:34.871 INFO  MemoryStore - Block broadcast_296 stored as values in memory (estimated size 292.8 KiB, free 1919.0 MiB)
20:33:34.872 INFO  MemoryStore - Block broadcast_296_piece0 stored as bytes in memory (estimated size 107.3 KiB, free 1918.9 MiB)
20:33:34.872 INFO  BlockManagerInfo - Added broadcast_296_piece0 in memory on localhost:45281 (size: 107.3 KiB, free: 1919.8 MiB)
20:33:34.872 INFO  SparkContext - Created broadcast 296 from broadcast at DAGScheduler.scala:1580
20:33:34.873 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 154 (MapPartitionsRDD[718] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:34.873 INFO  TaskSchedulerImpl - Adding task set 154.0 with 1 tasks resource profile 0
20:33:34.873 INFO  TaskSetManager - Starting task 0.0 in stage 154.0 (TID 210) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7869 bytes) 
20:33:34.874 INFO  Executor - Running task 0.0 in stage 154.0 (TID 210)
20:33:34.895 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:34.906 INFO  Executor - Finished task 0.0 in stage 154.0 (TID 210). 1148 bytes result sent to driver
20:33:34.906 INFO  TaskSetManager - Finished task 0.0 in stage 154.0 (TID 210) in 33 ms on localhost (executor driver) (1/1)
20:33:34.907 INFO  TaskSchedulerImpl - Removed TaskSet 154.0, whose tasks have all completed, from pool 
20:33:34.907 INFO  DAGScheduler - ShuffleMapStage 154 (mapToPair at SparkUtils.java:161) finished in 0.047 s
20:33:34.907 INFO  DAGScheduler - looking for newly runnable stages
20:33:34.907 INFO  DAGScheduler - running: HashSet()
20:33:34.907 INFO  DAGScheduler - waiting: HashSet(ResultStage 155)
20:33:34.907 INFO  DAGScheduler - failed: HashSet()
20:33:34.907 INFO  DAGScheduler - Submitting ResultStage 155 (MapPartitionsRDD[723] at mapToPair at CramSink.java:89), which has no missing parents
20:33:34.914 INFO  MemoryStore - Block broadcast_297 stored as values in memory (estimated size 153.2 KiB, free 1918.8 MiB)
20:33:34.914 INFO  MemoryStore - Block broadcast_297_piece0 stored as bytes in memory (estimated size 58.0 KiB, free 1918.7 MiB)
20:33:34.915 INFO  BlockManagerInfo - Added broadcast_297_piece0 in memory on localhost:45281 (size: 58.0 KiB, free: 1919.7 MiB)
20:33:34.915 INFO  SparkContext - Created broadcast 297 from broadcast at DAGScheduler.scala:1580
20:33:34.915 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 155 (MapPartitionsRDD[723] at mapToPair at CramSink.java:89) (first 15 tasks are for partitions Vector(0))
20:33:34.915 INFO  TaskSchedulerImpl - Adding task set 155.0 with 1 tasks resource profile 0
20:33:34.916 INFO  TaskSetManager - Starting task 0.0 in stage 155.0 (TID 211) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:34.916 INFO  Executor - Running task 0.0 in stage 155.0 (TID 211)
20:33:34.919 INFO  ShuffleBlockFetcherIterator - Getting 1 (82.3 KiB) non-empty blocks including 1 (82.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:34.920 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:34.926 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.926 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.926 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.926 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:34.926 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:34.926 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:34.976 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033346447708286455920504_0723_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest58282352449913242007.cram.parts/_temporary/0/task_202507152033346447708286455920504_0723_r_000000
20:33:34.976 INFO  SparkHadoopMapRedUtil - attempt_202507152033346447708286455920504_0723_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:34.976 INFO  Executor - Finished task 0.0 in stage 155.0 (TID 211). 1858 bytes result sent to driver
20:33:34.977 INFO  TaskSetManager - Finished task 0.0 in stage 155.0 (TID 211) in 62 ms on localhost (executor driver) (1/1)
20:33:34.977 INFO  TaskSchedulerImpl - Removed TaskSet 155.0, whose tasks have all completed, from pool 
20:33:34.977 INFO  DAGScheduler - ResultStage 155 (runJob at SparkHadoopWriter.scala:83) finished in 0.070 s
20:33:34.977 INFO  DAGScheduler - Job 111 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:34.977 INFO  TaskSchedulerImpl - Killing all running tasks in stage 155: Stage finished
20:33:34.977 INFO  DAGScheduler - Job 111 finished: runJob at SparkHadoopWriter.scala:83, took 0.118563 s
20:33:34.978 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033346447708286455920504_0723.
20:33:34.983 INFO  SparkHadoopWriter - Write Job job_202507152033346447708286455920504_0723 committed. Elapsed time: 4 ms.
20:33:34.996 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest58282352449913242007.cram
20:33:35.000 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest58282352449913242007.cram done
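The two HadoopFileSystemWrapper lines above show the sink's shard-then-concatenate pattern: each Spark task commits its own part file under the temporary <output>.cram.parts/ directory, and the driver then stitches the parts into the single .cram the test asked for. A minimal sketch of such a merge step, assuming plain java.nio file access and the usual "part-*" naming rather than GATK's actual HadoopFileSystemWrapper logic, is:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;
    import java.util.Comparator;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Illustrative only: append Spark part files from a "<output>.parts" directory
    // into one output file, in shard order. The real GATK code also handles
    // headers and terminator blocks, which this sketch ignores.
    public final class ConcatenateParts {
        public static void concatenate(final Path partsDir, final Path output) throws IOException {
            final List<Path> parts;
            try (Stream<Path> listing = Files.list(partsDir)) {
                parts = listing.filter(p -> p.getFileName().toString().startsWith("part-"))
                               .sorted(Comparator.comparing((Path p) -> p.getFileName().toString()))
                               .collect(Collectors.toList());
            }
            try (OutputStream out = Files.newOutputStream(output,
                    StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
                for (final Path part : parts) {
                    Files.copy(part, out); // append each shard's bytes in order
                }
            }
        }
    }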
20:33:35.002 INFO  MemoryStore - Block broadcast_298 stored as values in memory (estimated size 504.0 B, free 1918.7 MiB)
20:33:35.003 INFO  MemoryStore - Block broadcast_298_piece0 stored as bytes in memory (estimated size 159.0 B, free 1918.7 MiB)
20:33:35.003 INFO  BlockManagerInfo - Added broadcast_298_piece0 in memory on localhost:45281 (size: 159.0 B, free: 1919.7 MiB)
20:33:35.003 INFO  SparkContext - Created broadcast 298 from broadcast at CramSource.java:114
20:33:35.004 INFO  MemoryStore - Block broadcast_299 stored as values in memory (estimated size 297.9 KiB, free 1918.4 MiB)
20:33:35.013 INFO  MemoryStore - Block broadcast_299_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:35.014 INFO  BlockManagerInfo - Added broadcast_299_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:35.014 INFO  SparkContext - Created broadcast 299 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.034 INFO  FileInputFormat - Total input files to process : 1
20:33:35.059 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:35.060 INFO  DAGScheduler - Got job 112 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:35.060 INFO  DAGScheduler - Final stage: ResultStage 156 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:35.060 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.060 INFO  DAGScheduler - Missing parents: List()
20:33:35.060 INFO  DAGScheduler - Submitting ResultStage 156 (MapPartitionsRDD[729] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.076 INFO  MemoryStore - Block broadcast_300 stored as values in memory (estimated size 286.8 KiB, free 1918.1 MiB)
20:33:35.077 INFO  MemoryStore - Block broadcast_300_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.0 MiB)
20:33:35.077 INFO  BlockManagerInfo - Added broadcast_300_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.6 MiB)
20:33:35.077 INFO  SparkContext - Created broadcast 300 from broadcast at DAGScheduler.scala:1580
20:33:35.078 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 156 (MapPartitionsRDD[729] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.078 INFO  TaskSchedulerImpl - Adding task set 156.0 with 1 tasks resource profile 0
20:33:35.078 INFO  TaskSetManager - Starting task 0.0 in stage 156.0 (TID 212) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:35.078 INFO  Executor - Running task 0.0 in stage 156.0 (TID 212)
20:33:35.099 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest58282352449913242007.cram:0+43713
20:33:35.127 INFO  Executor - Finished task 0.0 in stage 156.0 (TID 212). 154101 bytes result sent to driver
20:33:35.128 INFO  TaskSetManager - Finished task 0.0 in stage 156.0 (TID 212) in 50 ms on localhost (executor driver) (1/1)
20:33:35.128 INFO  TaskSchedulerImpl - Removed TaskSet 156.0, whose tasks have all completed, from pool 
20:33:35.128 INFO  DAGScheduler - ResultStage 156 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.068 s
20:33:35.128 INFO  DAGScheduler - Job 112 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.128 INFO  TaskSchedulerImpl - Killing all running tasks in stage 156: Stage finished
20:33:35.128 INFO  DAGScheduler - Job 112 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.068817 s
20:33:35.133 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:35.134 INFO  DAGScheduler - Got job 113 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:35.134 INFO  DAGScheduler - Final stage: ResultStage 157 (count at ReadsSparkSinkUnitTest.java:185)
20:33:35.134 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.134 INFO  DAGScheduler - Missing parents: List()
20:33:35.134 INFO  DAGScheduler - Submitting ResultStage 157 (MapPartitionsRDD[712] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.145 INFO  MemoryStore - Block broadcast_301 stored as values in memory (estimated size 286.8 KiB, free 1917.7 MiB)
20:33:35.146 INFO  MemoryStore - Block broadcast_301_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.6 MiB)
20:33:35.147 INFO  BlockManagerInfo - Added broadcast_301_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.5 MiB)
20:33:35.147 INFO  SparkContext - Created broadcast 301 from broadcast at DAGScheduler.scala:1580
20:33:35.147 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 157 (MapPartitionsRDD[712] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.147 INFO  TaskSchedulerImpl - Adding task set 157.0 with 1 tasks resource profile 0
20:33:35.147 INFO  TaskSetManager - Starting task 0.0 in stage 157.0 (TID 213) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7880 bytes) 
20:33:35.148 INFO  Executor - Running task 0.0 in stage 157.0 (TID 213)
20:33:35.172 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:35.181 INFO  Executor - Finished task 0.0 in stage 157.0 (TID 213). 989 bytes result sent to driver
20:33:35.181 INFO  TaskSetManager - Finished task 0.0 in stage 157.0 (TID 213) in 34 ms on localhost (executor driver) (1/1)
20:33:35.181 INFO  TaskSchedulerImpl - Removed TaskSet 157.0, whose tasks have all completed, from pool 
20:33:35.182 INFO  DAGScheduler - ResultStage 157 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.048 s
20:33:35.182 INFO  DAGScheduler - Job 113 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.182 INFO  TaskSchedulerImpl - Killing all running tasks in stage 157: Stage finished
20:33:35.182 INFO  DAGScheduler - Job 113 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.048412 s
20:33:35.186 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:35.186 INFO  DAGScheduler - Got job 114 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:35.186 INFO  DAGScheduler - Final stage: ResultStage 158 (count at ReadsSparkSinkUnitTest.java:185)
20:33:35.187 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.187 INFO  DAGScheduler - Missing parents: List()
20:33:35.187 INFO  DAGScheduler - Submitting ResultStage 158 (MapPartitionsRDD[729] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.199 INFO  MemoryStore - Block broadcast_302 stored as values in memory (estimated size 286.8 KiB, free 1917.3 MiB)
20:33:35.200 INFO  MemoryStore - Block broadcast_302_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.2 MiB)
20:33:35.201 INFO  BlockManagerInfo - Added broadcast_302_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.4 MiB)
20:33:35.201 INFO  SparkContext - Created broadcast 302 from broadcast at DAGScheduler.scala:1580
20:33:35.201 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 158 (MapPartitionsRDD[729] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.201 INFO  TaskSchedulerImpl - Adding task set 158.0 with 1 tasks resource profile 0
20:33:35.201 INFO  TaskSetManager - Starting task 0.0 in stage 158.0 (TID 214) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:35.202 INFO  Executor - Running task 0.0 in stage 158.0 (TID 214)
20:33:35.226 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest58282352449913242007.cram:0+43713
20:33:35.240 INFO  Executor - Finished task 0.0 in stage 158.0 (TID 214). 989 bytes result sent to driver
20:33:35.240 INFO  TaskSetManager - Finished task 0.0 in stage 158.0 (TID 214) in 39 ms on localhost (executor driver) (1/1)
20:33:35.240 INFO  TaskSchedulerImpl - Removed TaskSet 158.0, whose tasks have all completed, from pool 
20:33:35.240 INFO  DAGScheduler - ResultStage 158 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.053 s
20:33:35.241 INFO  DAGScheduler - Job 114 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.241 INFO  TaskSchedulerImpl - Killing all running tasks in stage 158: Stage finished
20:33:35.241 INFO  DAGScheduler - Job 114 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.054434 s
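Jobs 112 through 114 above are the test's round-trip verification: the reads are collected from the freshly written .cram (collect at ReadsSparkSinkUnitTest.java:182) and the record counts of the original input and the output are compared (the two count jobs at line 185). A hypothetical sketch of that pattern over generic Spark RDDs, not the actual test code, looks like:

    import java.util.List;
    import org.apache.spark.api.java.JavaRDD;
    import static org.testng.Assert.assertEquals;

    // Hypothetical round-trip check mirroring the collect/count jobs in the log:
    // compare the written reads against the reads loaded from the original input.
    final class RoundTripCheck {
        static <T> void assertSameReads(final JavaRDD<T> originalReads, final JavaRDD<T> writtenReads) {
            final List<T> expected = originalReads.collect();
            final List<T> actual = writtenReads.collect();
            assertEquals(actual, expected);                              // contents match
            assertEquals(writtenReads.count(), originalReads.count());   // counts match
        }
    }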
20:33:35.244 INFO  MemoryStore - Block broadcast_303 stored as values in memory (estimated size 297.9 KiB, free 1916.9 MiB)
20:33:35.252 INFO  BlockManagerInfo - Removed broadcast_295_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.4 MiB)
20:33:35.253 INFO  BlockManagerInfo - Removed broadcast_302_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.5 MiB)
20:33:35.253 INFO  BlockManagerInfo - Removed broadcast_293_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:35.253 INFO  BlockManagerInfo - Removed broadcast_292_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.5 MiB)
20:33:35.254 INFO  BlockManagerInfo - Removed broadcast_297_piece0 on localhost:45281 in memory (size: 58.0 KiB, free: 1919.6 MiB)
20:33:35.254 INFO  BlockManagerInfo - Removed broadcast_300_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.7 MiB)
20:33:35.255 INFO  BlockManagerInfo - Removed broadcast_294_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.7 MiB)
20:33:35.255 INFO  BlockManagerInfo - Removed broadcast_301_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.8 MiB)
20:33:35.256 INFO  BlockManagerInfo - Removed broadcast_299_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:35.256 INFO  BlockManagerInfo - Removed broadcast_290_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.8 MiB)
20:33:35.256 INFO  BlockManagerInfo - Removed broadcast_296_piece0 on localhost:45281 in memory (size: 107.3 KiB, free: 1920.0 MiB)
20:33:35.257 INFO  BlockManagerInfo - Removed broadcast_291_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1920.0 MiB)
20:33:35.258 INFO  MemoryStore - Block broadcast_303_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.7 MiB)
20:33:35.258 INFO  BlockManagerInfo - Added broadcast_303_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1920.0 MiB)
20:33:35.258 INFO  BlockManagerInfo - Removed broadcast_298_piece0 on localhost:45281 in memory (size: 159.0 B, free: 1920.0 MiB)
20:33:35.258 INFO  SparkContext - Created broadcast 303 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.280 INFO  MemoryStore - Block broadcast_304 stored as values in memory (estimated size 297.9 KiB, free 1919.4 MiB)
20:33:35.287 INFO  MemoryStore - Block broadcast_304_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:35.287 INFO  BlockManagerInfo - Added broadcast_304_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:35.287 INFO  SparkContext - Created broadcast 304 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.312 INFO  FileInputFormat - Total input files to process : 1
20:33:35.313 INFO  MemoryStore - Block broadcast_305 stored as values in memory (estimated size 160.7 KiB, free 1919.2 MiB)
20:33:35.314 INFO  MemoryStore - Block broadcast_305_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1919.2 MiB)
20:33:35.314 INFO  BlockManagerInfo - Added broadcast_305_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.9 MiB)
20:33:35.315 INFO  SparkContext - Created broadcast 305 from broadcast at ReadsSparkSink.java:133
20:33:35.318 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:35.318 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.318 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.335 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:35.335 INFO  DAGScheduler - Registering RDD 743 (mapToPair at SparkUtils.java:161) as input to shuffle 33
20:33:35.335 INFO  DAGScheduler - Got job 115 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:35.335 INFO  DAGScheduler - Final stage: ResultStage 160 (runJob at SparkHadoopWriter.scala:83)
20:33:35.335 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 159)
20:33:35.336 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 159)
20:33:35.336 INFO  DAGScheduler - Submitting ShuffleMapStage 159 (MapPartitionsRDD[743] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:35.353 INFO  MemoryStore - Block broadcast_306 stored as values in memory (estimated size 520.4 KiB, free 1918.6 MiB)
20:33:35.354 INFO  MemoryStore - Block broadcast_306_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1918.5 MiB)
20:33:35.354 INFO  BlockManagerInfo - Added broadcast_306_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.7 MiB)
20:33:35.354 INFO  SparkContext - Created broadcast 306 from broadcast at DAGScheduler.scala:1580
20:33:35.355 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 159 (MapPartitionsRDD[743] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:35.355 INFO  TaskSchedulerImpl - Adding task set 159.0 with 1 tasks resource profile 0
20:33:35.355 INFO  TaskSetManager - Starting task 0.0 in stage 159.0 (TID 215) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:35.355 INFO  Executor - Running task 0.0 in stage 159.0 (TID 215)
20:33:35.387 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:35.401 INFO  Executor - Finished task 0.0 in stage 159.0 (TID 215). 1148 bytes result sent to driver
20:33:35.402 INFO  TaskSetManager - Finished task 0.0 in stage 159.0 (TID 215) in 47 ms on localhost (executor driver) (1/1)
20:33:35.402 INFO  TaskSchedulerImpl - Removed TaskSet 159.0, whose tasks have all completed, from pool 
20:33:35.402 INFO  DAGScheduler - ShuffleMapStage 159 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:35.402 INFO  DAGScheduler - looking for newly runnable stages
20:33:35.402 INFO  DAGScheduler - running: HashSet()
20:33:35.402 INFO  DAGScheduler - waiting: HashSet(ResultStage 160)
20:33:35.402 INFO  DAGScheduler - failed: HashSet()
20:33:35.402 INFO  DAGScheduler - Submitting ResultStage 160 (MapPartitionsRDD[749] at saveAsTextFile at SamSink.java:65), which has no missing parents
20:33:35.411 INFO  MemoryStore - Block broadcast_307 stored as values in memory (estimated size 241.1 KiB, free 1918.2 MiB)
20:33:35.412 INFO  MemoryStore - Block broadcast_307_piece0 stored as bytes in memory (estimated size 66.9 KiB, free 1918.2 MiB)
20:33:35.412 INFO  BlockManagerInfo - Added broadcast_307_piece0 in memory on localhost:45281 (size: 66.9 KiB, free: 1919.7 MiB)
20:33:35.412 INFO  SparkContext - Created broadcast 307 from broadcast at DAGScheduler.scala:1580
20:33:35.412 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 160 (MapPartitionsRDD[749] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
20:33:35.412 INFO  TaskSchedulerImpl - Adding task set 160.0 with 1 tasks resource profile 0
20:33:35.413 INFO  TaskSetManager - Starting task 0.0 in stage 160.0 (TID 216) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:35.413 INFO  Executor - Running task 0.0 in stage 160.0 (TID 216)
20:33:35.417 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:35.417 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:35.428 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:35.428 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.428 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.446 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203335104143475025855421_0749_m_000000_0' to file:/tmp/ReadsSparkSinkUnitTest69121311933460631841.sam.parts/_temporary/0/task_20250715203335104143475025855421_0749_m_000000
20:33:35.446 INFO  SparkHadoopMapRedUtil - attempt_20250715203335104143475025855421_0749_m_000000_0: Committed. Elapsed time: 0 ms.
20:33:35.446 INFO  Executor - Finished task 0.0 in stage 160.0 (TID 216). 1858 bytes result sent to driver
20:33:35.447 INFO  TaskSetManager - Finished task 0.0 in stage 160.0 (TID 216) in 34 ms on localhost (executor driver) (1/1)
20:33:35.447 INFO  TaskSchedulerImpl - Removed TaskSet 160.0, whose tasks have all completed, from pool 
20:33:35.447 INFO  DAGScheduler - ResultStage 160 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
20:33:35.447 INFO  DAGScheduler - Job 115 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.447 INFO  TaskSchedulerImpl - Killing all running tasks in stage 160: Stage finished
20:33:35.447 INFO  DAGScheduler - Job 115 finished: runJob at SparkHadoopWriter.scala:83, took 0.112236 s
20:33:35.447 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203335104143475025855421_0749.
20:33:35.452 INFO  SparkHadoopWriter - Write Job job_20250715203335104143475025855421_0749 committed. Elapsed time: 4 ms.
20:33:35.459 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/ReadsSparkSinkUnitTest69121311933460631841.sam
20:33:35.464 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest69121311933460631841.sam done
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
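The SamReaderFactory warnings above are expected rather than a failure: most likely the merged SAM is handed to htsjdk as a raw stream, so the reader cannot sniff a BAM/CRAM magic number or a recognized extension and falls back to assuming plain SAM, which is what was just written. For reference, a small htsjdk example that triggers the same fallback by opening a reader from a bare InputStream (the path is a placeholder):

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SamInputResource;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    // Illustrative htsjdk usage: opening a reader from a bare InputStream means the
    // factory cannot infer the format from a file extension, so it logs the
    // "assuming SAM format" warning seen above. The path below is a placeholder.
    public final class OpenSamFromStream {
        public static void main(final String[] args) throws IOException {
            try (InputStream in = Files.newInputStream(Paths.get("/tmp/example.sam"));
                 SamReader reader = SamReaderFactory.makeDefault()
                         .validationStringency(ValidationStringency.SILENT)
                         .open(SamInputResource.of(in))) {
                int shown = 0;
                for (final SAMRecord rec : reader) {
                    System.out.println(rec.getReadName());
                    if (++shown == 5) break;
                }
            }
        }
    }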
20:33:35.467 INFO  MemoryStore - Block broadcast_308 stored as values in memory (estimated size 160.7 KiB, free 1918.0 MiB)
20:33:35.468 INFO  MemoryStore - Block broadcast_308_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.0 MiB)
20:33:35.468 INFO  BlockManagerInfo - Added broadcast_308_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:35.468 INFO  SparkContext - Created broadcast 308 from broadcast at SamSource.java:78
20:33:35.469 INFO  MemoryStore - Block broadcast_309 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:35.475 INFO  MemoryStore - Block broadcast_309_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:35.475 INFO  BlockManagerInfo - Added broadcast_309_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:35.475 INFO  SparkContext - Created broadcast 309 from newAPIHadoopFile at SamSource.java:108
20:33:35.478 INFO  FileInputFormat - Total input files to process : 1
20:33:35.481 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:35.482 INFO  DAGScheduler - Got job 116 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:35.482 INFO  DAGScheduler - Final stage: ResultStage 161 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:35.482 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.482 INFO  DAGScheduler - Missing parents: List()
20:33:35.482 INFO  DAGScheduler - Submitting ResultStage 161 (MapPartitionsRDD[754] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.482 INFO  MemoryStore - Block broadcast_310 stored as values in memory (estimated size 7.5 KiB, free 1917.7 MiB)
20:33:35.483 INFO  MemoryStore - Block broadcast_310_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1917.7 MiB)
20:33:35.483 INFO  BlockManagerInfo - Added broadcast_310_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.6 MiB)
20:33:35.483 INFO  SparkContext - Created broadcast 310 from broadcast at DAGScheduler.scala:1580
20:33:35.483 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 161 (MapPartitionsRDD[754] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.483 INFO  TaskSchedulerImpl - Adding task set 161.0 with 1 tasks resource profile 0
20:33:35.484 INFO  TaskSetManager - Starting task 0.0 in stage 161.0 (TID 217) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:35.484 INFO  Executor - Running task 0.0 in stage 161.0 (TID 217)
20:33:35.485 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest69121311933460631841.sam:0+847558
20:33:35.497 INFO  Executor - Finished task 0.0 in stage 161.0 (TID 217). 651483 bytes result sent to driver
20:33:35.500 INFO  TaskSetManager - Finished task 0.0 in stage 161.0 (TID 217) in 15 ms on localhost (executor driver) (1/1)
20:33:35.500 INFO  TaskSchedulerImpl - Removed TaskSet 161.0, whose tasks have all completed, from pool 
20:33:35.500 INFO  DAGScheduler - ResultStage 161 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.018 s
20:33:35.500 INFO  DAGScheduler - Job 116 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.500 INFO  TaskSchedulerImpl - Killing all running tasks in stage 161: Stage finished
20:33:35.500 INFO  DAGScheduler - Job 116 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.018624 s
20:33:35.509 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:35.510 INFO  DAGScheduler - Got job 117 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:35.510 INFO  DAGScheduler - Final stage: ResultStage 162 (count at ReadsSparkSinkUnitTest.java:185)
20:33:35.510 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.510 INFO  DAGScheduler - Missing parents: List()
20:33:35.510 INFO  DAGScheduler - Submitting ResultStage 162 (MapPartitionsRDD[736] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.532 INFO  MemoryStore - Block broadcast_311 stored as values in memory (estimated size 426.1 KiB, free 1917.2 MiB)
20:33:35.534 INFO  MemoryStore - Block broadcast_311_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.1 MiB)
20:33:35.534 INFO  BlockManagerInfo - Added broadcast_311_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:35.534 INFO  SparkContext - Created broadcast 311 from broadcast at DAGScheduler.scala:1580
20:33:35.534 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 162 (MapPartitionsRDD[736] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.534 INFO  TaskSchedulerImpl - Adding task set 162.0 with 1 tasks resource profile 0
20:33:35.535 INFO  TaskSetManager - Starting task 0.0 in stage 162.0 (TID 218) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:35.535 INFO  Executor - Running task 0.0 in stage 162.0 (TID 218)
20:33:35.573 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:35.582 INFO  Executor - Finished task 0.0 in stage 162.0 (TID 218). 989 bytes result sent to driver
20:33:35.583 INFO  TaskSetManager - Finished task 0.0 in stage 162.0 (TID 218) in 49 ms on localhost (executor driver) (1/1)
20:33:35.583 INFO  TaskSchedulerImpl - Removed TaskSet 162.0, whose tasks have all completed, from pool 
20:33:35.583 INFO  DAGScheduler - ResultStage 162 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.073 s
20:33:35.583 INFO  DAGScheduler - Job 117 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.583 INFO  TaskSchedulerImpl - Killing all running tasks in stage 162: Stage finished
20:33:35.583 INFO  DAGScheduler - Job 117 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.073480 s
20:33:35.586 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:35.587 INFO  DAGScheduler - Got job 118 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:35.587 INFO  DAGScheduler - Final stage: ResultStage 163 (count at ReadsSparkSinkUnitTest.java:185)
20:33:35.587 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.587 INFO  DAGScheduler - Missing parents: List()
20:33:35.587 INFO  DAGScheduler - Submitting ResultStage 163 (MapPartitionsRDD[754] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:35.587 INFO  MemoryStore - Block broadcast_312 stored as values in memory (estimated size 7.4 KiB, free 1917.1 MiB)
20:33:35.588 INFO  MemoryStore - Block broadcast_312_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1917.1 MiB)
20:33:35.588 INFO  BlockManagerInfo - Added broadcast_312_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.4 MiB)
20:33:35.588 INFO  SparkContext - Created broadcast 312 from broadcast at DAGScheduler.scala:1580
20:33:35.588 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 163 (MapPartitionsRDD[754] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:35.588 INFO  TaskSchedulerImpl - Adding task set 163.0 with 1 tasks resource profile 0
20:33:35.589 INFO  TaskSetManager - Starting task 0.0 in stage 163.0 (TID 219) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:35.589 INFO  Executor - Running task 0.0 in stage 163.0 (TID 219)
20:33:35.590 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest69121311933460631841.sam:0+847558
20:33:35.597 INFO  Executor - Finished task 0.0 in stage 163.0 (TID 219). 946 bytes result sent to driver
20:33:35.598 INFO  TaskSetManager - Finished task 0.0 in stage 163.0 (TID 219) in 9 ms on localhost (executor driver) (1/1)
20:33:35.598 INFO  TaskSchedulerImpl - Removed TaskSet 163.0, whose tasks have all completed, from pool 
20:33:35.598 INFO  DAGScheduler - ResultStage 163 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.011 s
20:33:35.598 INFO  DAGScheduler - Job 118 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.598 INFO  TaskSchedulerImpl - Killing all running tasks in stage 163: Stage finished
20:33:35.598 INFO  DAGScheduler - Job 118 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.011604 s
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:35.601 INFO  MemoryStore - Block broadcast_313 stored as values in memory (estimated size 21.0 KiB, free 1917.1 MiB)
20:33:35.601 INFO  MemoryStore - Block broadcast_313_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 1917.1 MiB)
20:33:35.601 INFO  BlockManagerInfo - Added broadcast_313_piece0 in memory on localhost:45281 (size: 2.4 KiB, free: 1919.4 MiB)
20:33:35.601 INFO  SparkContext - Created broadcast 313 from broadcast at SamSource.java:78
20:33:35.602 INFO  MemoryStore - Block broadcast_314 stored as values in memory (estimated size 298.0 KiB, free 1916.8 MiB)
20:33:35.611 INFO  BlockManagerInfo - Removed broadcast_305_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:35.612 INFO  BlockManagerInfo - Removed broadcast_306_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.6 MiB)
20:33:35.612 INFO  BlockManagerInfo - Removed broadcast_303_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:35.613 INFO  BlockManagerInfo - Removed broadcast_304_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:35.613 INFO  BlockManagerInfo - Removed broadcast_307_piece0 on localhost:45281 in memory (size: 66.9 KiB, free: 1919.8 MiB)
20:33:35.615 INFO  BlockManagerInfo - Removed broadcast_309_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:35.615 INFO  BlockManagerInfo - Removed broadcast_311_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1920.0 MiB)
20:33:35.615 INFO  BlockManagerInfo - Removed broadcast_310_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1920.0 MiB)
20:33:35.616 INFO  BlockManagerInfo - Removed broadcast_308_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1920.0 MiB)
20:33:35.616 INFO  BlockManagerInfo - Removed broadcast_312_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1920.0 MiB)
20:33:35.617 INFO  MemoryStore - Block broadcast_314_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1919.6 MiB)
20:33:35.617 INFO  BlockManagerInfo - Added broadcast_314_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.9 MiB)
20:33:35.617 INFO  SparkContext - Created broadcast 314 from newAPIHadoopFile at SamSource.java:108
20:33:35.621 INFO  FileInputFormat - Total input files to process : 1
20:33:35.625 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
20:33:35.625 INFO  DAGScheduler - Got job 119 (collect at SparkUtils.java:205) with 1 output partitions
20:33:35.625 INFO  DAGScheduler - Final stage: ResultStage 164 (collect at SparkUtils.java:205)
20:33:35.625 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.625 INFO  DAGScheduler - Missing parents: List()
20:33:35.625 INFO  DAGScheduler - Submitting ResultStage 164 (MapPartitionsRDD[760] at mapPartitions at SparkUtils.java:188), which has no missing parents
20:33:35.626 INFO  MemoryStore - Block broadcast_315 stored as values in memory (estimated size 7.9 KiB, free 1919.6 MiB)
20:33:35.626 INFO  MemoryStore - Block broadcast_315_piece0 stored as bytes in memory (estimated size 3.9 KiB, free 1919.6 MiB)
20:33:35.626 INFO  BlockManagerInfo - Added broadcast_315_piece0 in memory on localhost:45281 (size: 3.9 KiB, free: 1919.9 MiB)
20:33:35.627 INFO  SparkContext - Created broadcast 315 from broadcast at DAGScheduler.scala:1580
20:33:35.627 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 164 (MapPartitionsRDD[760] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
20:33:35.627 INFO  TaskSchedulerImpl - Adding task set 164.0 with 1 tasks resource profile 0
20:33:35.627 INFO  TaskSetManager - Starting task 0.0 in stage 164.0 (TID 220) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7936 bytes) 
20:33:35.628 INFO  Executor - Running task 0.0 in stage 164.0 (TID 220)
20:33:35.629 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/engine/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.10000000-10000020.with.unmapped.queryname.samtools.sam:0+224884
20:33:35.631 INFO  Executor - Finished task 0.0 in stage 164.0 (TID 220). 1700 bytes result sent to driver
20:33:35.632 INFO  TaskSetManager - Finished task 0.0 in stage 164.0 (TID 220) in 5 ms on localhost (executor driver) (1/1)
20:33:35.633 INFO  TaskSchedulerImpl - Removed TaskSet 164.0, whose tasks have all completed, from pool 
20:33:35.633 INFO  DAGScheduler - ResultStage 164 (collect at SparkUtils.java:205) finished in 0.008 s
20:33:35.633 INFO  DAGScheduler - Job 119 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.633 INFO  TaskSchedulerImpl - Killing all running tasks in stage 164: Stage finished
20:33:35.633 INFO  DAGScheduler - Job 119 finished: collect at SparkUtils.java:205, took 0.008070 s
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:35.637 INFO  MemoryStore - Block broadcast_316 stored as values in memory (estimated size 21.0 KiB, free 1919.6 MiB)
20:33:35.638 INFO  MemoryStore - Block broadcast_316_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 1919.6 MiB)
20:33:35.638 INFO  BlockManagerInfo - Added broadcast_316_piece0 in memory on localhost:45281 (size: 2.4 KiB, free: 1919.9 MiB)
20:33:35.638 INFO  SparkContext - Created broadcast 316 from broadcast at SamSource.java:78
20:33:35.639 INFO  MemoryStore - Block broadcast_317 stored as values in memory (estimated size 298.0 KiB, free 1919.3 MiB)
20:33:35.645 INFO  MemoryStore - Block broadcast_317_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1919.3 MiB)
20:33:35.645 INFO  BlockManagerInfo - Added broadcast_317_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.9 MiB)
20:33:35.645 INFO  SparkContext - Created broadcast 317 from newAPIHadoopFile at SamSource.java:108
20:33:35.647 INFO  MemoryStore - Block broadcast_318 stored as values in memory (estimated size 21.0 KiB, free 1919.2 MiB)
20:33:35.647 INFO  MemoryStore - Block broadcast_318_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 1919.2 MiB)
20:33:35.647 INFO  BlockManagerInfo - Added broadcast_318_piece0 in memory on localhost:45281 (size: 2.4 KiB, free: 1919.9 MiB)
20:33:35.648 INFO  SparkContext - Created broadcast 318 from broadcast at ReadsSparkSink.java:133
20:33:35.648 INFO  MemoryStore - Block broadcast_319 stored as values in memory (estimated size 21.5 KiB, free 1919.2 MiB)
20:33:35.649 INFO  MemoryStore - Block broadcast_319_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 1919.2 MiB)
20:33:35.649 INFO  BlockManagerInfo - Added broadcast_319_piece0 in memory on localhost:45281 (size: 2.4 KiB, free: 1919.9 MiB)
20:33:35.649 INFO  SparkContext - Created broadcast 319 from broadcast at BamSink.java:76
20:33:35.651 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:35.651 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.651 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.668 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:35.668 INFO  DAGScheduler - Got job 120 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:35.668 INFO  DAGScheduler - Final stage: ResultStage 165 (runJob at SparkHadoopWriter.scala:83)
20:33:35.668 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.669 INFO  DAGScheduler - Missing parents: List()
20:33:35.669 INFO  DAGScheduler - Submitting ResultStage 165 (MapPartitionsRDD[770] at mapToPair at BamSink.java:91), which has no missing parents
20:33:35.675 INFO  MemoryStore - Block broadcast_320 stored as values in memory (estimated size 152.3 KiB, free 1919.1 MiB)
20:33:35.676 INFO  MemoryStore - Block broadcast_320_piece0 stored as bytes in memory (estimated size 56.4 KiB, free 1919.0 MiB)
20:33:35.676 INFO  BlockManagerInfo - Added broadcast_320_piece0 in memory on localhost:45281 (size: 56.4 KiB, free: 1919.8 MiB)
20:33:35.676 INFO  SparkContext - Created broadcast 320 from broadcast at DAGScheduler.scala:1580
20:33:35.676 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 165 (MapPartitionsRDD[770] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:35.676 INFO  TaskSchedulerImpl - Adding task set 165.0 with 1 tasks resource profile 0
20:33:35.677 INFO  TaskSetManager - Starting task 0.0 in stage 165.0 (TID 221) (localhost, executor driver, partition 0, PROCESS_LOCAL, 8561 bytes) 
20:33:35.677 INFO  Executor - Running task 0.0 in stage 165.0 (TID 221)
20:33:35.681 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/engine/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.10000000-10000020.with.unmapped.queryname.samtools.sam:0+224884
20:33:35.684 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:35.684 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.684 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.684 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:35.684 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.684 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.710 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033351116096479415698415_0770_r_000000_0' to file:/tmp/ReadsSparkSinkNotSorting10894445089451074782.bam.parts/_temporary/0/task_202507152033351116096479415698415_0770_r_000000
20:33:35.710 INFO  SparkHadoopMapRedUtil - attempt_202507152033351116096479415698415_0770_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:35.711 INFO  Executor - Finished task 0.0 in stage 165.0 (TID 221). 1084 bytes result sent to driver
20:33:35.711 INFO  TaskSetManager - Finished task 0.0 in stage 165.0 (TID 221) in 34 ms on localhost (executor driver) (1/1)
20:33:35.711 INFO  TaskSchedulerImpl - Removed TaskSet 165.0, whose tasks have all completed, from pool 
20:33:35.711 INFO  DAGScheduler - ResultStage 165 (runJob at SparkHadoopWriter.scala:83) finished in 0.042 s
20:33:35.711 INFO  DAGScheduler - Job 120 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.711 INFO  TaskSchedulerImpl - Killing all running tasks in stage 165: Stage finished
20:33:35.711 INFO  DAGScheduler - Job 120 finished: runJob at SparkHadoopWriter.scala:83, took 0.043338 s
20:33:35.712 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033351116096479415698415_0770.
20:33:35.717 INFO  SparkHadoopWriter - Write Job job_202507152033351116096479415698415_0770 committed. Elapsed time: 5 ms.
20:33:35.729 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkNotSorting10894445089451074782.bam
20:33:35.733 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkNotSorting10894445089451074782.bam done
20:33:35.734 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkNotSorting10894445089451074782.bam.parts/ to /tmp/ReadsSparkSinkNotSorting10894445089451074782.bam.sbi
20:33:35.738 INFO  IndexFileMerger - Done merging .sbi files
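Alongside the merged BAM, the IndexFileMerger lines show a splitting index (.sbi) being assembled from the per-shard indexes, so later Spark jobs can split the file without rescanning it. A trivial, hedged check that the sidecar landed next to the output (paths are placeholders, not the test's temp files):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Illustrative only: after a sharded BAM write, the merged splitting index is
    // expected as a ".sbi" sidecar next to the output file. Paths are placeholders.
    public final class SbiSidecarCheck {
        public static void main(final String[] args) {
            final Path bam = Paths.get("/tmp/output.bam");
            final Path sbi = Paths.get(bam + ".sbi");
            System.out.println("BAM present: " + Files.exists(bam));
            System.out.println("SBI present: " + Files.exists(sbi));
        }
    }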
20:33:35.739 INFO  MemoryStore - Block broadcast_321 stored as values in memory (estimated size 192.0 B, free 1919.0 MiB)
20:33:35.740 INFO  MemoryStore - Block broadcast_321_piece0 stored as bytes in memory (estimated size 127.0 B, free 1919.0 MiB)
20:33:35.740 INFO  BlockManagerInfo - Added broadcast_321_piece0 in memory on localhost:45281 (size: 127.0 B, free: 1919.8 MiB)
20:33:35.740 INFO  SparkContext - Created broadcast 321 from broadcast at BamSource.java:104
20:33:35.741 INFO  MemoryStore - Block broadcast_322 stored as values in memory (estimated size 297.9 KiB, free 1918.7 MiB)
20:33:35.747 INFO  MemoryStore - Block broadcast_322_piece0 stored as bytes in memory (estimated size 50.1 KiB, free 1918.7 MiB)
20:33:35.747 INFO  BlockManagerInfo - Added broadcast_322_piece0 in memory on localhost:45281 (size: 50.1 KiB, free: 1919.8 MiB)
20:33:35.748 INFO  SparkContext - Created broadcast 322 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.757 INFO  FileInputFormat - Total input files to process : 1
20:33:35.771 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
20:33:35.772 INFO  DAGScheduler - Got job 121 (collect at SparkUtils.java:205) with 1 output partitions
20:33:35.772 INFO  DAGScheduler - Final stage: ResultStage 166 (collect at SparkUtils.java:205)
20:33:35.772 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.772 INFO  DAGScheduler - Missing parents: List()
20:33:35.772 INFO  DAGScheduler - Submitting ResultStage 166 (MapPartitionsRDD[777] at mapPartitions at SparkUtils.java:188), which has no missing parents
20:33:35.778 INFO  MemoryStore - Block broadcast_323 stored as values in memory (estimated size 148.6 KiB, free 1918.5 MiB)
20:33:35.779 INFO  MemoryStore - Block broadcast_323_piece0 stored as bytes in memory (estimated size 54.7 KiB, free 1918.5 MiB)
20:33:35.779 INFO  BlockManagerInfo - Added broadcast_323_piece0 in memory on localhost:45281 (size: 54.7 KiB, free: 1919.7 MiB)
20:33:35.779 INFO  SparkContext - Created broadcast 323 from broadcast at DAGScheduler.scala:1580
20:33:35.779 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 166 (MapPartitionsRDD[777] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
20:33:35.779 INFO  TaskSchedulerImpl - Adding task set 166.0 with 1 tasks resource profile 0
20:33:35.780 INFO  TaskSetManager - Starting task 0.0 in stage 166.0 (TID 222) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7811 bytes) 
20:33:35.780 INFO  Executor - Running task 0.0 in stage 166.0 (TID 222)
20:33:35.792 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkNotSorting10894445089451074782.bam:0+59395
20:33:35.793 INFO  Executor - Finished task 0.0 in stage 166.0 (TID 222). 1700 bytes result sent to driver
20:33:35.794 INFO  TaskSetManager - Finished task 0.0 in stage 166.0 (TID 222) in 14 ms on localhost (executor driver) (1/1)
20:33:35.794 INFO  TaskSchedulerImpl - Removed TaskSet 166.0, whose tasks have all completed, from pool 
20:33:35.794 INFO  DAGScheduler - ResultStage 166 (collect at SparkUtils.java:205) finished in 0.022 s
20:33:35.794 INFO  DAGScheduler - Job 121 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.794 INFO  TaskSchedulerImpl - Killing all running tasks in stage 166: Stage finished
20:33:35.794 INFO  DAGScheduler - Job 121 finished: collect at SparkUtils.java:205, took 0.022561 s
20:33:35.810 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:91
20:33:35.810 INFO  DAGScheduler - Got job 122 (collect at ReadsSparkSinkUnitTest.java:91) with 1 output partitions
20:33:35.810 INFO  DAGScheduler - Final stage: ResultStage 167 (collect at ReadsSparkSinkUnitTest.java:91)
20:33:35.810 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.810 INFO  DAGScheduler - Missing parents: List()
20:33:35.811 INFO  DAGScheduler - Submitting ResultStage 167 (ZippedPartitionsRDD2[780] at zipPartitions at SparkUtils.java:244), which has no missing parents
20:33:35.820 INFO  MemoryStore - Block broadcast_324 stored as values in memory (estimated size 149.8 KiB, free 1918.3 MiB)
20:33:35.821 INFO  MemoryStore - Block broadcast_324_piece0 stored as bytes in memory (estimated size 55.2 KiB, free 1918.3 MiB)
20:33:35.821 INFO  BlockManagerInfo - Added broadcast_324_piece0 in memory on localhost:45281 (size: 55.2 KiB, free: 1919.7 MiB)
20:33:35.821 INFO  SparkContext - Created broadcast 324 from broadcast at DAGScheduler.scala:1580
20:33:35.821 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 167 (ZippedPartitionsRDD2[780] at zipPartitions at SparkUtils.java:244) (first 15 tasks are for partitions Vector(0))
20:33:35.821 INFO  TaskSchedulerImpl - Adding task set 167.0 with 1 tasks resource profile 0
20:33:35.822 INFO  TaskSetManager - Starting task 0.0 in stage 167.0 (TID 223) (localhost, executor driver, partition 0, PROCESS_LOCAL, 8436 bytes) 
20:33:35.822 INFO  Executor - Running task 0.0 in stage 167.0 (TID 223)
20:33:35.834 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkNotSorting10894445089451074782.bam:0+59395
20:33:35.835 INFO  Executor - Finished task 0.0 in stage 167.0 (TID 223). 192451 bytes result sent to driver
20:33:35.836 INFO  TaskSetManager - Finished task 0.0 in stage 167.0 (TID 223) in 14 ms on localhost (executor driver) (1/1)
20:33:35.836 INFO  TaskSchedulerImpl - Removed TaskSet 167.0, whose tasks have all completed, from pool 
20:33:35.836 INFO  DAGScheduler - ResultStage 167 (collect at ReadsSparkSinkUnitTest.java:91) finished in 0.025 s
20:33:35.836 INFO  DAGScheduler - Job 122 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.836 INFO  TaskSchedulerImpl - Killing all running tasks in stage 167: Stage finished
20:33:35.837 INFO  DAGScheduler - Job 122 finished: collect at ReadsSparkSinkUnitTest.java:91, took 0.026427 s
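Job 122 collects the output of the "not sorting" round trip (collect at ReadsSparkSinkUnitTest.java:91): the input here is a queryname-grouped SAM, so the useful property to verify is that records sharing a read name stay adjacent in the written BAM even though no coordinate sort was requested. A hypothetical helper for that kind of check, not GATK's own assertion, might look like:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Hypothetical helper: given read names in output order, confirm that all
    // records with the same name form one contiguous run (queryname-grouped).
    final class QuerynameGroupingCheck {
        static boolean isQuerynameGrouped(final List<String> readNamesInOrder) {
            final Set<String> seen = new HashSet<>();
            String current = null;
            for (final String name : readNamesInOrder) {
                if (name.equals(current)) {
                    continue;          // still inside the current group
                }
                if (!seen.add(name)) {
                    return false;      // name re-appeared after its group ended
                }
                current = name;
            }
            return true;
        }
    }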
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
WARNING	2025-07-15 20:33:35	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:35.839 INFO  MemoryStore - Block broadcast_325 stored as values in memory (estimated size 21.0 KiB, free 1918.3 MiB)
20:33:35.843 INFO  MemoryStore - Block broadcast_325_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 1918.3 MiB)
20:33:35.843 INFO  BlockManagerInfo - Added broadcast_325_piece0 in memory on localhost:45281 (size: 2.4 KiB, free: 1919.7 MiB)
20:33:35.843 INFO  BlockManagerInfo - Removed broadcast_319_piece0 on localhost:45281 in memory (size: 2.4 KiB, free: 1919.7 MiB)
20:33:35.844 INFO  SparkContext - Created broadcast 325 from broadcast at SamSource.java:78
20:33:35.844 INFO  BlockManagerInfo - Removed broadcast_318_piece0 on localhost:45281 in memory (size: 2.4 KiB, free: 1919.7 MiB)
20:33:35.844 INFO  BlockManagerInfo - Removed broadcast_315_piece0 on localhost:45281 in memory (size: 3.9 KiB, free: 1919.7 MiB)
20:33:35.845 INFO  MemoryStore - Block broadcast_326 stored as values in memory (estimated size 298.0 KiB, free 1918.2 MiB)
20:33:35.845 INFO  BlockManagerInfo - Removed broadcast_324_piece0 on localhost:45281 in memory (size: 55.2 KiB, free: 1919.7 MiB)
20:33:35.845 INFO  BlockManagerInfo - Removed broadcast_323_piece0 on localhost:45281 in memory (size: 54.7 KiB, free: 1919.8 MiB)
20:33:35.846 INFO  BlockManagerInfo - Removed broadcast_320_piece0 on localhost:45281 in memory (size: 56.4 KiB, free: 1919.8 MiB)
20:33:35.846 INFO  BlockManagerInfo - Removed broadcast_317_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.9 MiB)
20:33:35.847 INFO  BlockManagerInfo - Removed broadcast_316_piece0 on localhost:45281 in memory (size: 2.4 KiB, free: 1919.9 MiB)
20:33:35.852 INFO  MemoryStore - Block broadcast_326_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1918.9 MiB)
20:33:35.852 INFO  BlockManagerInfo - Added broadcast_326_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.8 MiB)
20:33:35.852 INFO  SparkContext - Created broadcast 326 from newAPIHadoopFile at SamSource.java:108
20:33:35.855 INFO  FileInputFormat - Total input files to process : 1
20:33:35.859 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
20:33:35.859 INFO  DAGScheduler - Got job 123 (collect at SparkUtils.java:205) with 1 output partitions
20:33:35.859 INFO  DAGScheduler - Final stage: ResultStage 168 (collect at SparkUtils.java:205)
20:33:35.859 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.859 INFO  DAGScheduler - Missing parents: List()
20:33:35.859 INFO  DAGScheduler - Submitting ResultStage 168 (MapPartitionsRDD[786] at mapPartitions at SparkUtils.java:188), which has no missing parents
20:33:35.860 INFO  MemoryStore - Block broadcast_327 stored as values in memory (estimated size 7.9 KiB, free 1918.9 MiB)
20:33:35.860 INFO  MemoryStore - Block broadcast_327_piece0 stored as bytes in memory (estimated size 3.9 KiB, free 1918.9 MiB)
20:33:35.860 INFO  BlockManagerInfo - Added broadcast_327_piece0 in memory on localhost:45281 (size: 3.9 KiB, free: 1919.8 MiB)
20:33:35.860 INFO  SparkContext - Created broadcast 327 from broadcast at DAGScheduler.scala:1580
20:33:35.860 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 168 (MapPartitionsRDD[786] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
20:33:35.860 INFO  TaskSchedulerImpl - Adding task set 168.0 with 1 tasks resource profile 0
20:33:35.861 INFO  TaskSetManager - Starting task 0.0 in stage 168.0 (TID 224) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7936 bytes) 
20:33:35.861 INFO  Executor - Running task 0.0 in stage 168.0 (TID 224)
20:33:35.862 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/engine/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.10000000-10000020.with.unmapped.queryname.samtools.sam:0+224884
20:33:35.864 INFO  Executor - Finished task 0.0 in stage 168.0 (TID 224). 1657 bytes result sent to driver
20:33:35.865 INFO  TaskSetManager - Finished task 0.0 in stage 168.0 (TID 224) in 4 ms on localhost (executor driver) (1/1)
20:33:35.865 INFO  TaskSchedulerImpl - Removed TaskSet 168.0, whose tasks have all completed, from pool 
20:33:35.865 INFO  DAGScheduler - ResultStage 168 (collect at SparkUtils.java:205) finished in 0.006 s
20:33:35.865 INFO  DAGScheduler - Job 123 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.865 INFO  TaskSchedulerImpl - Killing all running tasks in stage 168: Stage finished
20:33:35.865 INFO  DAGScheduler - Job 123 finished: collect at SparkUtils.java:205, took 0.006322 s
20:33:35.870 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:94
20:33:35.871 INFO  DAGScheduler - Got job 124 (collect at ReadsSparkSinkUnitTest.java:94) with 1 output partitions
20:33:35.871 INFO  DAGScheduler - Final stage: ResultStage 169 (collect at ReadsSparkSinkUnitTest.java:94)
20:33:35.871 INFO  DAGScheduler - Parents of final stage: List()
20:33:35.871 INFO  DAGScheduler - Missing parents: List()
20:33:35.871 INFO  DAGScheduler - Submitting ResultStage 169 (ZippedPartitionsRDD2[789] at zipPartitions at SparkUtils.java:244), which has no missing parents
20:33:35.871 INFO  MemoryStore - Block broadcast_328 stored as values in memory (estimated size 9.6 KiB, free 1918.9 MiB)
20:33:35.872 INFO  MemoryStore - Block broadcast_328_piece0 stored as bytes in memory (estimated size 4.4 KiB, free 1918.9 MiB)
20:33:35.872 INFO  BlockManagerInfo - Added broadcast_328_piece0 in memory on localhost:45281 (size: 4.4 KiB, free: 1919.8 MiB)
20:33:35.872 INFO  SparkContext - Created broadcast 328 from broadcast at DAGScheduler.scala:1580
20:33:35.872 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 169 (ZippedPartitionsRDD2[789] at zipPartitions at SparkUtils.java:244) (first 15 tasks are for partitions Vector(0))
20:33:35.872 INFO  TaskSchedulerImpl - Adding task set 169.0 with 1 tasks resource profile 0
20:33:35.873 INFO  TaskSetManager - Starting task 0.0 in stage 169.0 (TID 225) (localhost, executor driver, partition 0, PROCESS_LOCAL, 8561 bytes) 
20:33:35.873 INFO  Executor - Running task 0.0 in stage 169.0 (TID 225)
20:33:35.874 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/engine/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.10000000-10000020.with.unmapped.queryname.samtools.sam:0+224884
20:33:35.883 INFO  Executor - Finished task 0.0 in stage 169.0 (TID 225). 192494 bytes result sent to driver
20:33:35.884 INFO  TaskSetManager - Finished task 0.0 in stage 169.0 (TID 225) in 11 ms on localhost (executor driver) (1/1)
20:33:35.884 INFO  TaskSchedulerImpl - Removed TaskSet 169.0, whose tasks have all completed, from pool 
20:33:35.884 INFO  DAGScheduler - ResultStage 169 (collect at ReadsSparkSinkUnitTest.java:94) finished in 0.013 s
20:33:35.884 INFO  DAGScheduler - Job 124 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:35.884 INFO  TaskSchedulerImpl - Killing all running tasks in stage 169: Stage finished
20:33:35.884 INFO  DAGScheduler - Job 124 finished: collect at ReadsSparkSinkUnitTest.java:94, took 0.013392 s
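Jobs 122 through 124 are driver-side collects (the "collect at ReadsSparkSinkUnitTest.java:91/94" entries) that the test uses to pull the round-tripped reads back and compare them with the originals. A self-contained sketch of that collect-and-compare pattern in plain Spark follows; the file paths, header filtering, and line-level comparison are assumptions for illustration and do not reproduce the GATK test code.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    import java.util.List;

    public class CollectAndCompareSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("collect-and-compare").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                // Hypothetical paths standing in for the original input and the file written by the sink.
                JavaRDD<String> original = sc.textFile("/tmp/original.sam").filter(l -> !l.startsWith("@"));
                JavaRDD<String> roundTripped = sc.textFile("/tmp/roundtripped.sam").filter(l -> !l.startsWith("@"));

                // collect() brings every partition back to the driver, which is what the
                // collect jobs in the log above correspond to.
                List<String> a = original.collect();
                List<String> b = roundTripped.collect();
                if (!a.equals(b)) {
                    throw new AssertionError("round-tripped reads differ from the originals");
                }
            }
        }
    }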
20:33:35.893 INFO  MemoryStore - Block broadcast_329 stored as values in memory (estimated size 297.9 KiB, free 1918.6 MiB)
20:33:35.899 INFO  MemoryStore - Block broadcast_329_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.6 MiB)
20:33:35.899 INFO  BlockManagerInfo - Added broadcast_329_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:35.900 INFO  SparkContext - Created broadcast 329 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.922 INFO  MemoryStore - Block broadcast_330 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:35.929 INFO  MemoryStore - Block broadcast_330_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:35.929 INFO  BlockManagerInfo - Added broadcast_330_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:35.929 INFO  SparkContext - Created broadcast 330 from newAPIHadoopFile at PathSplitSource.java:96
20:33:35.949 INFO  FileInputFormat - Total input files to process : 1
20:33:35.951 INFO  MemoryStore - Block broadcast_331 stored as values in memory (estimated size 160.7 KiB, free 1918.1 MiB)
20:33:35.952 INFO  MemoryStore - Block broadcast_331_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:35.953 INFO  BlockManagerInfo - Added broadcast_331_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:35.953 INFO  SparkContext - Created broadcast 331 from broadcast at ReadsSparkSink.java:133
20:33:35.954 INFO  MemoryStore - Block broadcast_332 stored as values in memory (estimated size 163.2 KiB, free 1917.9 MiB)
20:33:35.955 INFO  MemoryStore - Block broadcast_332_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.9 MiB)
20:33:35.955 INFO  BlockManagerInfo - Added broadcast_332_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:35.955 INFO  SparkContext - Created broadcast 332 from broadcast at BamSink.java:76
20:33:35.957 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:35.957 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:35.957 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:35.974 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:35.975 INFO  DAGScheduler - Registering RDD 803 (mapToPair at SparkUtils.java:161) as input to shuffle 34
20:33:35.975 INFO  DAGScheduler - Got job 125 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:35.975 INFO  DAGScheduler - Final stage: ResultStage 171 (runJob at SparkHadoopWriter.scala:83)
20:33:35.975 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 170)
20:33:35.975 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 170)
20:33:35.975 INFO  DAGScheduler - Submitting ShuffleMapStage 170 (MapPartitionsRDD[803] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:35.993 INFO  MemoryStore - Block broadcast_333 stored as values in memory (estimated size 520.4 KiB, free 1917.4 MiB)
20:33:35.994 INFO  MemoryStore - Block broadcast_333_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.2 MiB)
20:33:35.994 INFO  BlockManagerInfo - Added broadcast_333_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.6 MiB)
20:33:35.994 INFO  SparkContext - Created broadcast 333 from broadcast at DAGScheduler.scala:1580
20:33:35.995 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 170 (MapPartitionsRDD[803] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:35.995 INFO  TaskSchedulerImpl - Adding task set 170.0 with 1 tasks resource profile 0
20:33:35.995 INFO  TaskSetManager - Starting task 0.0 in stage 170.0 (TID 226) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:35.995 INFO  Executor - Running task 0.0 in stage 170.0 (TID 226)
20:33:36.026 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:36.042 INFO  Executor - Finished task 0.0 in stage 170.0 (TID 226). 1148 bytes result sent to driver
20:33:36.043 INFO  TaskSetManager - Finished task 0.0 in stage 170.0 (TID 226) in 48 ms on localhost (executor driver) (1/1)
20:33:36.043 INFO  TaskSchedulerImpl - Removed TaskSet 170.0, whose tasks have all completed, from pool 
20:33:36.043 INFO  DAGScheduler - ShuffleMapStage 170 (mapToPair at SparkUtils.java:161) finished in 0.068 s
20:33:36.043 INFO  DAGScheduler - looking for newly runnable stages
20:33:36.043 INFO  DAGScheduler - running: HashSet()
20:33:36.043 INFO  DAGScheduler - waiting: HashSet(ResultStage 171)
20:33:36.043 INFO  DAGScheduler - failed: HashSet()
20:33:36.043 INFO  DAGScheduler - Submitting ResultStage 171 (MapPartitionsRDD[808] at mapToPair at BamSink.java:91), which has no missing parents
20:33:36.050 INFO  MemoryStore - Block broadcast_334 stored as values in memory (estimated size 241.4 KiB, free 1917.0 MiB)
20:33:36.051 INFO  MemoryStore - Block broadcast_334_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.9 MiB)
20:33:36.051 INFO  BlockManagerInfo - Added broadcast_334_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.5 MiB)
20:33:36.051 INFO  SparkContext - Created broadcast 334 from broadcast at DAGScheduler.scala:1580
20:33:36.052 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 171 (MapPartitionsRDD[808] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:36.052 INFO  TaskSchedulerImpl - Adding task set 171.0 with 1 tasks resource profile 0
20:33:36.052 INFO  TaskSetManager - Starting task 0.0 in stage 171.0 (TID 227) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:36.053 INFO  Executor - Running task 0.0 in stage 171.0 (TID 227)
20:33:36.057 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:36.057 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:36.068 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.068 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.068 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.068 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.068 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.068 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.095 INFO  FileOutputCommitter - Saved output of task 'attempt_2025071520333590845716148357742_0808_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1.someOtherPlace13257041880669430884/_temporary/0/task_2025071520333590845716148357742_0808_r_000000
20:33:36.095 INFO  SparkHadoopMapRedUtil - attempt_2025071520333590845716148357742_0808_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:36.096 INFO  Executor - Finished task 0.0 in stage 171.0 (TID 227). 1858 bytes result sent to driver
20:33:36.096 INFO  TaskSetManager - Finished task 0.0 in stage 171.0 (TID 227) in 44 ms on localhost (executor driver) (1/1)
20:33:36.096 INFO  TaskSchedulerImpl - Removed TaskSet 171.0, whose tasks have all completed, from pool 
20:33:36.096 INFO  DAGScheduler - ResultStage 171 (runJob at SparkHadoopWriter.scala:83) finished in 0.052 s
20:33:36.096 INFO  DAGScheduler - Job 125 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.096 INFO  TaskSchedulerImpl - Killing all running tasks in stage 171: Stage finished
20:33:36.096 INFO  DAGScheduler - Job 125 finished: runJob at SparkHadoopWriter.scala:83, took 0.122084 s
20:33:36.097 INFO  SparkHadoopWriter - Start to commit write Job job_2025071520333590845716148357742_0808.
20:33:36.103 INFO  SparkHadoopWriter - Write Job job_2025071520333590845716148357742_0808 committed. Elapsed time: 5 ms.
20:33:36.115 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest16140905482774220785.bam
20:33:36.119 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest16140905482774220785.bam done
20:33:36.119 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace13257041880669430884 to /tmp/ReadsSparkSinkUnitTest16140905482774220785.bam.sbi
20:33:36.124 INFO  IndexFileMerger - Done merging .sbi files
20:33:36.124 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace13257041880669430884 to /tmp/ReadsSparkSinkUnitTest16140905482774220785.bam.bai
20:33:36.129 INFO  IndexFileMerger - Done merging .bai files
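The write cycle that just finished follows a recognizable pattern: the reads are shuffled (ShuffleMapStage 170), each partition is committed as a part file under a _temporary directory by FileOutputCommitter, the parts are concatenated into a single BAM, and the per-part .sbi/.bai indexes are merged. Below is a rough sketch of only the "write parts, then concatenate" idea using standard Java file APIs; the directory layout and file names are assumptions, and a real BAM merge (as done here by Disq's HadoopFileSystemWrapper and IndexFileMerger) must also handle headers and the BGZF terminator, which this sketch does not.

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.ArrayList;
    import java.util.List;

    public class ConcatPartsSketch {
        public static void main(String[] args) throws IOException {
            // Hypothetical locations mirroring the log: a temp dir of part files and a final BAM.
            Path partsDir = Paths.get("/tmp/ReadsSparkSinkUnitTest1.parts");
            Path output = Paths.get("/tmp/ReadsSparkSinkUnitTest1.bam");

            // Gather the part files in name order (part-r-00000, part-r-00001, ...).
            List<Path> parts = new ArrayList<>();
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(partsDir, "part-r-*")) {
                stream.forEach(parts::add);
            }
            parts.sort(null); // Path is Comparable; natural order matches the part numbering

            // Append every part to the single output file, byte for byte.
            try (OutputStream out = Files.newOutputStream(output)) {
                for (Path part : parts) {
                    Files.copy(part, out);
                }
            }
        }
    }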
20:33:36.130 INFO  MemoryStore - Block broadcast_335 stored as values in memory (estimated size 320.0 B, free 1916.9 MiB)
20:33:36.131 INFO  MemoryStore - Block broadcast_335_piece0 stored as bytes in memory (estimated size 233.0 B, free 1916.9 MiB)
20:33:36.131 INFO  BlockManagerInfo - Added broadcast_335_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.5 MiB)
20:33:36.131 INFO  SparkContext - Created broadcast 335 from broadcast at BamSource.java:104
20:33:36.132 INFO  MemoryStore - Block broadcast_336 stored as values in memory (estimated size 297.9 KiB, free 1916.6 MiB)
20:33:36.139 INFO  BlockManagerInfo - Removed broadcast_325_piece0 on localhost:45281 in memory (size: 2.4 KiB, free: 1919.5 MiB)
20:33:36.140 INFO  BlockManagerInfo - Removed broadcast_313_piece0 on localhost:45281 in memory (size: 2.4 KiB, free: 1919.5 MiB)
20:33:36.140 INFO  BlockManagerInfo - Removed broadcast_330_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:36.141 INFO  BlockManagerInfo - Removed broadcast_334_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.6 MiB)
20:33:36.141 INFO  BlockManagerInfo - Removed broadcast_333_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.8 MiB)
20:33:36.142 INFO  BlockManagerInfo - Removed broadcast_331_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:36.142 INFO  BlockManagerInfo - Removed broadcast_332_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:36.143 INFO  BlockManagerInfo - Removed broadcast_321_piece0 on localhost:45281 in memory (size: 127.0 B, free: 1919.8 MiB)
20:33:36.143 INFO  BlockManagerInfo - Removed broadcast_328_piece0 on localhost:45281 in memory (size: 4.4 KiB, free: 1919.8 MiB)
20:33:36.144 INFO  BlockManagerInfo - Removed broadcast_314_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.8 MiB)
20:33:36.144 INFO  BlockManagerInfo - Removed broadcast_322_piece0 on localhost:45281 in memory (size: 50.1 KiB, free: 1919.9 MiB)
20:33:36.144 INFO  BlockManagerInfo - Removed broadcast_327_piece0 on localhost:45281 in memory (size: 3.9 KiB, free: 1919.9 MiB)
20:33:36.145 INFO  BlockManagerInfo - Removed broadcast_326_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1920.0 MiB)
20:33:36.146 INFO  MemoryStore - Block broadcast_336_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:36.146 INFO  BlockManagerInfo - Added broadcast_336_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:36.146 INFO  SparkContext - Created broadcast 336 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.155 INFO  FileInputFormat - Total input files to process : 1
20:33:36.169 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:36.170 INFO  DAGScheduler - Got job 126 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:36.170 INFO  DAGScheduler - Final stage: ResultStage 172 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:36.170 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.170 INFO  DAGScheduler - Missing parents: List()
20:33:36.170 INFO  DAGScheduler - Submitting ResultStage 172 (MapPartitionsRDD[814] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.180 INFO  MemoryStore - Block broadcast_337 stored as values in memory (estimated size 148.2 KiB, free 1919.2 MiB)
20:33:36.181 INFO  MemoryStore - Block broadcast_337_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1919.1 MiB)
20:33:36.181 INFO  BlockManagerInfo - Added broadcast_337_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.8 MiB)
20:33:36.182 INFO  SparkContext - Created broadcast 337 from broadcast at DAGScheduler.scala:1580
20:33:36.182 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 172 (MapPartitionsRDD[814] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.182 INFO  TaskSchedulerImpl - Adding task set 172.0 with 1 tasks resource profile 0
20:33:36.182 INFO  TaskSetManager - Starting task 0.0 in stage 172.0 (TID 228) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:36.183 INFO  Executor - Running task 0.0 in stage 172.0 (TID 228)
20:33:36.199 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest16140905482774220785.bam:0+237038
20:33:36.203 INFO  Executor - Finished task 0.0 in stage 172.0 (TID 228). 651483 bytes result sent to driver
20:33:36.206 INFO  TaskSetManager - Finished task 0.0 in stage 172.0 (TID 228) in 24 ms on localhost (executor driver) (1/1)
20:33:36.206 INFO  TaskSchedulerImpl - Removed TaskSet 172.0, whose tasks have all completed, from pool 
20:33:36.206 INFO  DAGScheduler - ResultStage 172 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.036 s
20:33:36.206 INFO  DAGScheduler - Job 126 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.206 INFO  TaskSchedulerImpl - Killing all running tasks in stage 172: Stage finished
20:33:36.206 INFO  DAGScheduler - Job 126 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.036476 s
20:33:36.216 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:36.216 INFO  DAGScheduler - Got job 127 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:36.216 INFO  DAGScheduler - Final stage: ResultStage 173 (count at ReadsSparkSinkUnitTest.java:185)
20:33:36.216 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.216 INFO  DAGScheduler - Missing parents: List()
20:33:36.217 INFO  DAGScheduler - Submitting ResultStage 173 (MapPartitionsRDD[796] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.233 INFO  MemoryStore - Block broadcast_338 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:36.235 INFO  MemoryStore - Block broadcast_338_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:36.235 INFO  BlockManagerInfo - Added broadcast_338_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:36.235 INFO  SparkContext - Created broadcast 338 from broadcast at DAGScheduler.scala:1580
20:33:36.235 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 173 (MapPartitionsRDD[796] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.235 INFO  TaskSchedulerImpl - Adding task set 173.0 with 1 tasks resource profile 0
20:33:36.236 INFO  TaskSetManager - Starting task 0.0 in stage 173.0 (TID 229) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:36.236 INFO  Executor - Running task 0.0 in stage 173.0 (TID 229)
20:33:36.266 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:36.275 INFO  Executor - Finished task 0.0 in stage 173.0 (TID 229). 989 bytes result sent to driver
20:33:36.276 INFO  TaskSetManager - Finished task 0.0 in stage 173.0 (TID 229) in 40 ms on localhost (executor driver) (1/1)
20:33:36.276 INFO  TaskSchedulerImpl - Removed TaskSet 173.0, whose tasks have all completed, from pool 
20:33:36.276 INFO  DAGScheduler - ResultStage 173 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:36.276 INFO  DAGScheduler - Job 127 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.276 INFO  TaskSchedulerImpl - Killing all running tasks in stage 173: Stage finished
20:33:36.276 INFO  DAGScheduler - Job 127 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.060149 s
20:33:36.281 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:36.281 INFO  DAGScheduler - Got job 128 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:36.281 INFO  DAGScheduler - Final stage: ResultStage 174 (count at ReadsSparkSinkUnitTest.java:185)
20:33:36.281 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.281 INFO  DAGScheduler - Missing parents: List()
20:33:36.281 INFO  DAGScheduler - Submitting ResultStage 174 (MapPartitionsRDD[814] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.291 INFO  MemoryStore - Block broadcast_339 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:36.291 INFO  MemoryStore - Block broadcast_339_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.4 MiB)
20:33:36.291 INFO  BlockManagerInfo - Added broadcast_339_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:36.292 INFO  SparkContext - Created broadcast 339 from broadcast at DAGScheduler.scala:1580
20:33:36.292 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 174 (MapPartitionsRDD[814] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.292 INFO  TaskSchedulerImpl - Adding task set 174.0 with 1 tasks resource profile 0
20:33:36.292 INFO  TaskSetManager - Starting task 0.0 in stage 174.0 (TID 230) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:36.292 INFO  Executor - Running task 0.0 in stage 174.0 (TID 230)
20:33:36.304 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest16140905482774220785.bam:0+237038
20:33:36.307 INFO  Executor - Finished task 0.0 in stage 174.0 (TID 230). 989 bytes result sent to driver
20:33:36.307 INFO  TaskSetManager - Finished task 0.0 in stage 174.0 (TID 230) in 15 ms on localhost (executor driver) (1/1)
20:33:36.307 INFO  TaskSchedulerImpl - Removed TaskSet 174.0, whose tasks have all completed, from pool 
20:33:36.307 INFO  DAGScheduler - ResultStage 174 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.026 s
20:33:36.307 INFO  DAGScheduler - Job 128 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.308 INFO  TaskSchedulerImpl - Killing all running tasks in stage 174: Stage finished
20:33:36.308 INFO  DAGScheduler - Job 128 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.026894 s
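Jobs 127 and 128 are the paired count() calls at ReadsSparkSinkUnitTest.java:185, one on the original source RDD and one on the RDD re-read from the freshly written BAM. A minimal Spark sketch of that count-based assertion is given below; the paths and the header filter are placeholders rather than the test's real inputs.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class CountCheckSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("count-check").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                // Placeholder paths for the original input and the file written by the sink.
                long expected = sc.textFile("/tmp/original.sam").filter(l -> !l.startsWith("@")).count();
                long actual   = sc.textFile("/tmp/roundtripped.sam").filter(l -> !l.startsWith("@")).count();
                if (expected != actual) {
                    throw new AssertionError("read counts differ: " + expected + " vs " + actual);
                }
            }
        }
    }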
20:33:36.315 INFO  MemoryStore - Block broadcast_340 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:36.322 INFO  MemoryStore - Block broadcast_340_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:36.322 INFO  BlockManagerInfo - Added broadcast_340_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:36.322 INFO  SparkContext - Created broadcast 340 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.344 INFO  MemoryStore - Block broadcast_341 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:36.350 INFO  MemoryStore - Block broadcast_341_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:36.350 INFO  BlockManagerInfo - Added broadcast_341_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:36.350 INFO  SparkContext - Created broadcast 341 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.370 INFO  FileInputFormat - Total input files to process : 1
20:33:36.371 INFO  MemoryStore - Block broadcast_342 stored as values in memory (estimated size 160.7 KiB, free 1917.5 MiB)
20:33:36.372 INFO  MemoryStore - Block broadcast_342_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:36.372 INFO  BlockManagerInfo - Added broadcast_342_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:36.372 INFO  SparkContext - Created broadcast 342 from broadcast at ReadsSparkSink.java:133
20:33:36.374 INFO  MemoryStore - Block broadcast_343 stored as values in memory (estimated size 163.2 KiB, free 1917.4 MiB)
20:33:36.374 INFO  MemoryStore - Block broadcast_343_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:36.375 INFO  BlockManagerInfo - Added broadcast_343_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:36.375 INFO  SparkContext - Created broadcast 343 from broadcast at BamSink.java:76
20:33:36.376 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.376 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.376 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.393 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:36.393 INFO  DAGScheduler - Registering RDD 828 (mapToPair at SparkUtils.java:161) as input to shuffle 35
20:33:36.394 INFO  DAGScheduler - Got job 129 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:36.394 INFO  DAGScheduler - Final stage: ResultStage 176 (runJob at SparkHadoopWriter.scala:83)
20:33:36.394 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 175)
20:33:36.394 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 175)
20:33:36.394 INFO  DAGScheduler - Submitting ShuffleMapStage 175 (MapPartitionsRDD[828] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:36.413 INFO  MemoryStore - Block broadcast_344 stored as values in memory (estimated size 520.4 KiB, free 1916.8 MiB)
20:33:36.414 INFO  MemoryStore - Block broadcast_344_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.7 MiB)
20:33:36.415 INFO  BlockManagerInfo - Added broadcast_344_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:36.415 INFO  SparkContext - Created broadcast 344 from broadcast at DAGScheduler.scala:1580
20:33:36.415 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 175 (MapPartitionsRDD[828] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:36.415 INFO  TaskSchedulerImpl - Adding task set 175.0 with 1 tasks resource profile 0
20:33:36.415 INFO  TaskSetManager - Starting task 0.0 in stage 175.0 (TID 231) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:36.416 INFO  Executor - Running task 0.0 in stage 175.0 (TID 231)
20:33:36.446 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:36.462 INFO  Executor - Finished task 0.0 in stage 175.0 (TID 231). 1148 bytes result sent to driver
20:33:36.462 INFO  TaskSetManager - Finished task 0.0 in stage 175.0 (TID 231) in 47 ms on localhost (executor driver) (1/1)
20:33:36.462 INFO  TaskSchedulerImpl - Removed TaskSet 175.0, whose tasks have all completed, from pool 
20:33:36.463 INFO  DAGScheduler - ShuffleMapStage 175 (mapToPair at SparkUtils.java:161) finished in 0.069 s
20:33:36.463 INFO  DAGScheduler - looking for newly runnable stages
20:33:36.463 INFO  DAGScheduler - running: HashSet()
20:33:36.463 INFO  DAGScheduler - waiting: HashSet(ResultStage 176)
20:33:36.463 INFO  DAGScheduler - failed: HashSet()
20:33:36.463 INFO  DAGScheduler - Submitting ResultStage 176 (MapPartitionsRDD[833] at mapToPair at BamSink.java:91), which has no missing parents
20:33:36.475 INFO  MemoryStore - Block broadcast_345 stored as values in memory (estimated size 241.4 KiB, free 1916.4 MiB)
20:33:36.476 INFO  MemoryStore - Block broadcast_345_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.4 MiB)
20:33:36.476 INFO  BlockManagerInfo - Added broadcast_345_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.3 MiB)
20:33:36.476 INFO  SparkContext - Created broadcast 345 from broadcast at DAGScheduler.scala:1580
20:33:36.476 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 176 (MapPartitionsRDD[833] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:36.476 INFO  TaskSchedulerImpl - Adding task set 176.0 with 1 tasks resource profile 0
20:33:36.477 INFO  TaskSetManager - Starting task 0.0 in stage 176.0 (TID 232) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:36.477 INFO  Executor - Running task 0.0 in stage 176.0 (TID 232)
20:33:36.481 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:36.481 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:36.492 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.492 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.492 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.492 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.492 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.492 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.517 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033363578884281364147371_0833_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1.someOtherPlace6034053196241508278/_temporary/0/task_202507152033363578884281364147371_0833_r_000000
20:33:36.517 INFO  SparkHadoopMapRedUtil - attempt_202507152033363578884281364147371_0833_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:36.517 INFO  Executor - Finished task 0.0 in stage 176.0 (TID 232). 1858 bytes result sent to driver
20:33:36.518 INFO  TaskSetManager - Finished task 0.0 in stage 176.0 (TID 232) in 41 ms on localhost (executor driver) (1/1)
20:33:36.518 INFO  TaskSchedulerImpl - Removed TaskSet 176.0, whose tasks have all completed, from pool 
20:33:36.518 INFO  DAGScheduler - ResultStage 176 (runJob at SparkHadoopWriter.scala:83) finished in 0.055 s
20:33:36.518 INFO  DAGScheduler - Job 129 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.518 INFO  TaskSchedulerImpl - Killing all running tasks in stage 176: Stage finished
20:33:36.518 INFO  DAGScheduler - Job 129 finished: runJob at SparkHadoopWriter.scala:83, took 0.124928 s
20:33:36.518 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033363578884281364147371_0833.
20:33:36.523 INFO  SparkHadoopWriter - Write Job job_202507152033363578884281364147371_0833 committed. Elapsed time: 5 ms.
20:33:36.535 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest113700674098529422698.bam
20:33:36.540 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest113700674098529422698.bam done
20:33:36.540 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace6034053196241508278 to /tmp/ReadsSparkSinkUnitTest113700674098529422698.bam.sbi
20:33:36.545 INFO  IndexFileMerger - Done merging .sbi files
20:33:36.545 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace6034053196241508278 to /tmp/ReadsSparkSinkUnitTest113700674098529422698.bam.bai
20:33:36.550 INFO  IndexFileMerger - Done merging .bai files
20:33:36.552 INFO  MemoryStore - Block broadcast_346 stored as values in memory (estimated size 13.3 KiB, free 1916.4 MiB)
20:33:36.552 INFO  MemoryStore - Block broadcast_346_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1916.3 MiB)
20:33:36.552 INFO  BlockManagerInfo - Added broadcast_346_piece0 in memory on localhost:45281 (size: 8.3 KiB, free: 1919.3 MiB)
20:33:36.553 INFO  SparkContext - Created broadcast 346 from broadcast at BamSource.java:104
20:33:36.553 INFO  MemoryStore - Block broadcast_347 stored as values in memory (estimated size 297.9 KiB, free 1916.1 MiB)
20:33:36.561 INFO  BlockManagerInfo - Removed broadcast_338_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:36.561 INFO  BlockManagerInfo - Removed broadcast_341_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:36.562 INFO  BlockManagerInfo - Removed broadcast_336_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:36.562 INFO  BlockManagerInfo - Removed broadcast_339_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.6 MiB)
20:33:36.563 INFO  BlockManagerInfo - Removed broadcast_343_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:36.563 INFO  BlockManagerInfo - Removed broadcast_337_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.7 MiB)
20:33:36.564 INFO  BlockManagerInfo - Removed broadcast_335_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.7 MiB)
20:33:36.564 INFO  BlockManagerInfo - Removed broadcast_329_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:36.565 INFO  BlockManagerInfo - Removed broadcast_345_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.8 MiB)
20:33:36.565 INFO  BlockManagerInfo - Removed broadcast_344_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.9 MiB)
20:33:36.566 INFO  BlockManagerInfo - Removed broadcast_342_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.9 MiB)
20:33:36.568 INFO  MemoryStore - Block broadcast_347_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:36.568 INFO  BlockManagerInfo - Added broadcast_347_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:36.568 INFO  SparkContext - Created broadcast 347 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.577 INFO  FileInputFormat - Total input files to process : 1
20:33:36.591 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:36.591 INFO  DAGScheduler - Got job 130 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:36.591 INFO  DAGScheduler - Final stage: ResultStage 177 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:36.591 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.591 INFO  DAGScheduler - Missing parents: List()
20:33:36.592 INFO  DAGScheduler - Submitting ResultStage 177 (MapPartitionsRDD[839] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.598 INFO  MemoryStore - Block broadcast_348 stored as values in memory (estimated size 148.2 KiB, free 1919.2 MiB)
20:33:36.598 INFO  MemoryStore - Block broadcast_348_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1919.1 MiB)
20:33:36.598 INFO  BlockManagerInfo - Added broadcast_348_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.8 MiB)
20:33:36.599 INFO  SparkContext - Created broadcast 348 from broadcast at DAGScheduler.scala:1580
20:33:36.599 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 177 (MapPartitionsRDD[839] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.599 INFO  TaskSchedulerImpl - Adding task set 177.0 with 1 tasks resource profile 0
20:33:36.599 INFO  TaskSetManager - Starting task 0.0 in stage 177.0 (TID 233) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:36.600 INFO  Executor - Running task 0.0 in stage 177.0 (TID 233)
20:33:36.611 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest113700674098529422698.bam:0+237038
20:33:36.616 INFO  Executor - Finished task 0.0 in stage 177.0 (TID 233). 651483 bytes result sent to driver
20:33:36.617 INFO  TaskSetManager - Finished task 0.0 in stage 177.0 (TID 233) in 18 ms on localhost (executor driver) (1/1)
20:33:36.617 INFO  TaskSchedulerImpl - Removed TaskSet 177.0, whose tasks have all completed, from pool 
20:33:36.617 INFO  DAGScheduler - ResultStage 177 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.025 s
20:33:36.617 INFO  DAGScheduler - Job 130 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.617 INFO  TaskSchedulerImpl - Killing all running tasks in stage 177: Stage finished
20:33:36.617 INFO  DAGScheduler - Job 130 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.026350 s
20:33:36.627 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:36.627 INFO  DAGScheduler - Got job 131 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:36.627 INFO  DAGScheduler - Final stage: ResultStage 178 (count at ReadsSparkSinkUnitTest.java:185)
20:33:36.627 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.627 INFO  DAGScheduler - Missing parents: List()
20:33:36.627 INFO  DAGScheduler - Submitting ResultStage 178 (MapPartitionsRDD[821] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.644 INFO  MemoryStore - Block broadcast_349 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:36.645 INFO  MemoryStore - Block broadcast_349_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.5 MiB)
20:33:36.645 INFO  BlockManagerInfo - Added broadcast_349_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:36.645 INFO  SparkContext - Created broadcast 349 from broadcast at DAGScheduler.scala:1580
20:33:36.646 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 178 (MapPartitionsRDD[821] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.646 INFO  TaskSchedulerImpl - Adding task set 178.0 with 1 tasks resource profile 0
20:33:36.646 INFO  TaskSetManager - Starting task 0.0 in stage 178.0 (TID 234) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:36.647 INFO  Executor - Running task 0.0 in stage 178.0 (TID 234)
20:33:36.676 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:36.686 INFO  Executor - Finished task 0.0 in stage 178.0 (TID 234). 989 bytes result sent to driver
20:33:36.686 INFO  TaskSetManager - Finished task 0.0 in stage 178.0 (TID 234) in 40 ms on localhost (executor driver) (1/1)
20:33:36.686 INFO  TaskSchedulerImpl - Removed TaskSet 178.0, whose tasks have all completed, from pool 
20:33:36.686 INFO  DAGScheduler - ResultStage 178 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:36.686 INFO  DAGScheduler - Job 131 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.686 INFO  TaskSchedulerImpl - Killing all running tasks in stage 178: Stage finished
20:33:36.686 INFO  DAGScheduler - Job 131 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.059547 s
20:33:36.690 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:36.690 INFO  DAGScheduler - Got job 132 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:36.690 INFO  DAGScheduler - Final stage: ResultStage 179 (count at ReadsSparkSinkUnitTest.java:185)
20:33:36.690 INFO  DAGScheduler - Parents of final stage: List()
20:33:36.690 INFO  DAGScheduler - Missing parents: List()
20:33:36.690 INFO  DAGScheduler - Submitting ResultStage 179 (MapPartitionsRDD[839] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:36.696 INFO  MemoryStore - Block broadcast_350 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:36.697 INFO  MemoryStore - Block broadcast_350_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.3 MiB)
20:33:36.697 INFO  BlockManagerInfo - Added broadcast_350_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:36.697 INFO  SparkContext - Created broadcast 350 from broadcast at DAGScheduler.scala:1580
20:33:36.697 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 179 (MapPartitionsRDD[839] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:36.697 INFO  TaskSchedulerImpl - Adding task set 179.0 with 1 tasks resource profile 0
20:33:36.698 INFO  TaskSetManager - Starting task 0.0 in stage 179.0 (TID 235) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:36.698 INFO  Executor - Running task 0.0 in stage 179.0 (TID 235)
20:33:36.709 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest113700674098529422698.bam:0+237038
20:33:36.712 INFO  Executor - Finished task 0.0 in stage 179.0 (TID 235). 989 bytes result sent to driver
20:33:36.713 INFO  TaskSetManager - Finished task 0.0 in stage 179.0 (TID 235) in 15 ms on localhost (executor driver) (1/1)
20:33:36.713 INFO  TaskSchedulerImpl - Removed TaskSet 179.0, whose tasks have all completed, from pool 
20:33:36.713 INFO  DAGScheduler - ResultStage 179 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.023 s
20:33:36.713 INFO  DAGScheduler - Job 132 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.713 INFO  TaskSchedulerImpl - Killing all running tasks in stage 179: Stage finished
20:33:36.713 INFO  DAGScheduler - Job 132 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.023243 s
20:33:36.721 INFO  MemoryStore - Block broadcast_351 stored as values in memory (estimated size 297.9 KiB, free 1918.0 MiB)
20:33:36.727 INFO  MemoryStore - Block broadcast_351_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:36.727 INFO  BlockManagerInfo - Added broadcast_351_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:36.728 INFO  SparkContext - Created broadcast 351 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.750 INFO  MemoryStore - Block broadcast_352 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:36.756 INFO  MemoryStore - Block broadcast_352_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:36.756 INFO  BlockManagerInfo - Added broadcast_352_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:36.756 INFO  SparkContext - Created broadcast 352 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.776 INFO  FileInputFormat - Total input files to process : 1
20:33:36.778 INFO  MemoryStore - Block broadcast_353 stored as values in memory (estimated size 160.7 KiB, free 1917.5 MiB)
20:33:36.779 INFO  MemoryStore - Block broadcast_353_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:36.779 INFO  BlockManagerInfo - Added broadcast_353_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:36.779 INFO  SparkContext - Created broadcast 353 from broadcast at ReadsSparkSink.java:133
20:33:36.780 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:36.780 INFO  MemoryStore - Block broadcast_354 stored as values in memory (estimated size 163.2 KiB, free 1917.3 MiB)
20:33:36.781 INFO  MemoryStore - Block broadcast_354_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:36.781 INFO  BlockManagerInfo - Added broadcast_354_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:36.781 INFO  SparkContext - Created broadcast 354 from broadcast at BamSink.java:76
20:33:36.783 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.783 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.783 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.800 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:36.800 INFO  DAGScheduler - Registering RDD 853 (mapToPair at SparkUtils.java:161) as input to shuffle 36
20:33:36.800 INFO  DAGScheduler - Got job 133 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:36.800 INFO  DAGScheduler - Final stage: ResultStage 181 (runJob at SparkHadoopWriter.scala:83)
20:33:36.800 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 180)
20:33:36.800 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 180)
20:33:36.800 INFO  DAGScheduler - Submitting ShuffleMapStage 180 (MapPartitionsRDD[853] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:36.818 INFO  MemoryStore - Block broadcast_355 stored as values in memory (estimated size 520.4 KiB, free 1916.8 MiB)
20:33:36.819 INFO  MemoryStore - Block broadcast_355_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.7 MiB)
20:33:36.819 INFO  BlockManagerInfo - Added broadcast_355_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:36.819 INFO  SparkContext - Created broadcast 355 from broadcast at DAGScheduler.scala:1580
20:33:36.819 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 180 (MapPartitionsRDD[853] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:36.819 INFO  TaskSchedulerImpl - Adding task set 180.0 with 1 tasks resource profile 0
20:33:36.820 INFO  TaskSetManager - Starting task 0.0 in stage 180.0 (TID 236) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:36.820 INFO  Executor - Running task 0.0 in stage 180.0 (TID 236)
20:33:36.850 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:36.869 INFO  Executor - Finished task 0.0 in stage 180.0 (TID 236). 1148 bytes result sent to driver
20:33:36.869 INFO  TaskSetManager - Finished task 0.0 in stage 180.0 (TID 236) in 49 ms on localhost (executor driver) (1/1)
20:33:36.869 INFO  TaskSchedulerImpl - Removed TaskSet 180.0, whose tasks have all completed, from pool 
20:33:36.870 INFO  DAGScheduler - ShuffleMapStage 180 (mapToPair at SparkUtils.java:161) finished in 0.069 s
20:33:36.870 INFO  DAGScheduler - looking for newly runnable stages
20:33:36.870 INFO  DAGScheduler - running: HashSet()
20:33:36.870 INFO  DAGScheduler - waiting: HashSet(ResultStage 181)
20:33:36.870 INFO  DAGScheduler - failed: HashSet()
20:33:36.870 INFO  DAGScheduler - Submitting ResultStage 181 (MapPartitionsRDD[858] at mapToPair at BamSink.java:91), which has no missing parents
20:33:36.877 INFO  MemoryStore - Block broadcast_356 stored as values in memory (estimated size 241.4 KiB, free 1916.4 MiB)
20:33:36.877 INFO  MemoryStore - Block broadcast_356_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.4 MiB)
20:33:36.878 INFO  BlockManagerInfo - Added broadcast_356_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.3 MiB)
20:33:36.878 INFO  SparkContext - Created broadcast 356 from broadcast at DAGScheduler.scala:1580
20:33:36.878 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 181 (MapPartitionsRDD[858] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:36.878 INFO  TaskSchedulerImpl - Adding task set 181.0 with 1 tasks resource profile 0
20:33:36.878 INFO  TaskSetManager - Starting task 0.0 in stage 181.0 (TID 237) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:36.879 INFO  Executor - Running task 0.0 in stage 181.0 (TID 237)
20:33:36.883 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:36.883 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:36.895 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.895 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.895 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.895 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:36.895 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:36.895 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:36.914 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033369054829134745705745_0858_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1.someOtherPlace7763902547894637899/_temporary/0/task_202507152033369054829134745705745_0858_r_000000
20:33:36.914 INFO  SparkHadoopMapRedUtil - attempt_202507152033369054829134745705745_0858_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:36.915 INFO  Executor - Finished task 0.0 in stage 181.0 (TID 237). 1858 bytes result sent to driver
20:33:36.915 INFO  TaskSetManager - Finished task 0.0 in stage 181.0 (TID 237) in 37 ms on localhost (executor driver) (1/1)
20:33:36.915 INFO  TaskSchedulerImpl - Removed TaskSet 181.0, whose tasks have all completed, from pool 
20:33:36.915 INFO  DAGScheduler - ResultStage 181 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
20:33:36.915 INFO  DAGScheduler - Job 133 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:36.915 INFO  TaskSchedulerImpl - Killing all running tasks in stage 181: Stage finished
20:33:36.915 INFO  DAGScheduler - Job 133 finished: runJob at SparkHadoopWriter.scala:83, took 0.115598 s
20:33:36.916 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033369054829134745705745_0858.
20:33:36.920 INFO  SparkHadoopWriter - Write Job job_202507152033369054829134745705745_0858 committed. Elapsed time: 4 ms.
20:33:36.931 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest11999791897402549114.bam
20:33:36.936 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest11999791897402549114.bam done
20:33:36.936 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace7763902547894637899 to /tmp/ReadsSparkSinkUnitTest11999791897402549114.bam.bai
20:33:36.940 INFO  IndexFileMerger - Done merging .bai files
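Unlike the two previous write cycles, this one merges only the .bai files and performs no .sbi merge, which is consistent with a parameterization that disables the splitting index (note also the earlier "Unrecognized write option: DISABLE" warning from HtsjdkReadsRddStorage). A tiny sketch of the post-write check a test might make for which sidecar files accompany the merged BAM is shown below; the file names are hypothetical.

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class IndexPresenceSketch {
        public static void main(String[] args) {
            // Hypothetical output BAM produced by the sink.
            Path bam = Paths.get("/tmp/ReadsSparkSinkUnitTest1.bam");
            Path bai = Paths.get(bam + ".bai");
            Path sbi = Paths.get(bam + ".sbi");

            // With splitting-index output disabled, only the .bai should accompany the merged BAM.
            System.out.println("bam exists: " + Files.exists(bam));
            System.out.println("bai exists: " + Files.exists(bai));
            System.out.println("sbi exists: " + Files.exists(sbi));
        }
    }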
20:33:36.943 INFO  MemoryStore - Block broadcast_357 stored as values in memory (estimated size 297.9 KiB, free 1916.1 MiB)
20:33:36.949 INFO  MemoryStore - Block broadcast_357_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.0 MiB)
20:33:36.949 INFO  BlockManagerInfo - Added broadcast_357_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:36.949 INFO  SparkContext - Created broadcast 357 from newAPIHadoopFile at PathSplitSource.java:96
20:33:36.969 INFO  FileInputFormat - Total input files to process : 1
20:33:37.004 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:37.004 INFO  DAGScheduler - Got job 134 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:37.004 INFO  DAGScheduler - Final stage: ResultStage 182 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:37.004 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.004 INFO  DAGScheduler - Missing parents: List()
20:33:37.004 INFO  DAGScheduler - Submitting ResultStage 182 (MapPartitionsRDD[865] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.021 INFO  MemoryStore - Block broadcast_358 stored as values in memory (estimated size 426.2 KiB, free 1915.6 MiB)
20:33:37.026 INFO  BlockManagerInfo - Removed broadcast_340_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:37.026 INFO  BlockManagerInfo - Removed broadcast_350_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.3 MiB)
20:33:37.026 INFO  MemoryStore - Block broadcast_358_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.0 MiB)
20:33:37.026 INFO  BlockManagerInfo - Added broadcast_358_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:37.027 INFO  SparkContext - Created broadcast 358 from broadcast at DAGScheduler.scala:1580
20:33:37.027 INFO  BlockManagerInfo - Removed broadcast_354_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.2 MiB)
20:33:37.027 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 182 (MapPartitionsRDD[865] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.027 INFO  TaskSchedulerImpl - Adding task set 182.0 with 1 tasks resource profile 0
20:33:37.027 INFO  BlockManagerInfo - Removed broadcast_355_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.4 MiB)
20:33:37.028 INFO  TaskSetManager - Starting task 0.0 in stage 182.0 (TID 238) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:37.028 INFO  BlockManagerInfo - Removed broadcast_348_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.4 MiB)
20:33:37.028 INFO  Executor - Running task 0.0 in stage 182.0 (TID 238)
20:33:37.028 INFO  BlockManagerInfo - Removed broadcast_352_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:37.029 INFO  BlockManagerInfo - Removed broadcast_349_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.6 MiB)
20:33:37.030 INFO  BlockManagerInfo - Removed broadcast_347_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:37.030 INFO  BlockManagerInfo - Removed broadcast_356_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.7 MiB)
20:33:37.032 INFO  BlockManagerInfo - Removed broadcast_353_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:37.033 INFO  BlockManagerInfo - Removed broadcast_346_piece0 on localhost:45281 in memory (size: 8.3 KiB, free: 1919.8 MiB)
20:33:37.059 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11999791897402549114.bam:0+237038
20:33:37.071 INFO  Executor - Finished task 0.0 in stage 182.0 (TID 238). 651483 bytes result sent to driver
20:33:37.074 INFO  TaskSetManager - Finished task 0.0 in stage 182.0 (TID 238) in 47 ms on localhost (executor driver) (1/1)
20:33:37.074 INFO  TaskSchedulerImpl - Removed TaskSet 182.0, whose tasks have all completed, from pool 
20:33:37.074 INFO  DAGScheduler - ResultStage 182 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.070 s
20:33:37.075 INFO  DAGScheduler - Job 134 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.075 INFO  TaskSchedulerImpl - Killing all running tasks in stage 182: Stage finished
20:33:37.075 INFO  DAGScheduler - Job 134 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.070930 s
20:33:37.090 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:37.090 INFO  DAGScheduler - Got job 135 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:37.090 INFO  DAGScheduler - Final stage: ResultStage 183 (count at ReadsSparkSinkUnitTest.java:185)
20:33:37.090 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.090 INFO  DAGScheduler - Missing parents: List()
20:33:37.090 INFO  DAGScheduler - Submitting ResultStage 183 (MapPartitionsRDD[846] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.109 INFO  MemoryStore - Block broadcast_359 stored as values in memory (estimated size 426.1 KiB, free 1918.3 MiB)
20:33:37.111 INFO  MemoryStore - Block broadcast_359_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.2 MiB)
20:33:37.111 INFO  BlockManagerInfo - Added broadcast_359_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:37.111 INFO  SparkContext - Created broadcast 359 from broadcast at DAGScheduler.scala:1580
20:33:37.111 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 183 (MapPartitionsRDD[846] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.111 INFO  TaskSchedulerImpl - Adding task set 183.0 with 1 tasks resource profile 0
20:33:37.112 INFO  TaskSetManager - Starting task 0.0 in stage 183.0 (TID 239) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:37.112 INFO  Executor - Running task 0.0 in stage 183.0 (TID 239)
20:33:37.141 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:37.150 INFO  Executor - Finished task 0.0 in stage 183.0 (TID 239). 989 bytes result sent to driver
20:33:37.150 INFO  TaskSetManager - Finished task 0.0 in stage 183.0 (TID 239) in 39 ms on localhost (executor driver) (1/1)
20:33:37.150 INFO  TaskSchedulerImpl - Removed TaskSet 183.0, whose tasks have all completed, from pool 
20:33:37.151 INFO  DAGScheduler - ResultStage 183 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.061 s
20:33:37.151 INFO  DAGScheduler - Job 135 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.151 INFO  TaskSchedulerImpl - Killing all running tasks in stage 183: Stage finished
20:33:37.151 INFO  DAGScheduler - Job 135 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061132 s
20:33:37.154 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:37.154 INFO  DAGScheduler - Got job 136 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:37.154 INFO  DAGScheduler - Final stage: ResultStage 184 (count at ReadsSparkSinkUnitTest.java:185)
20:33:37.154 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.154 INFO  DAGScheduler - Missing parents: List()
20:33:37.154 INFO  DAGScheduler - Submitting ResultStage 184 (MapPartitionsRDD[865] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.171 INFO  MemoryStore - Block broadcast_360 stored as values in memory (estimated size 426.1 KiB, free 1917.8 MiB)
20:33:37.172 INFO  MemoryStore - Block broadcast_360_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.6 MiB)
20:33:37.172 INFO  BlockManagerInfo - Added broadcast_360_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:37.172 INFO  SparkContext - Created broadcast 360 from broadcast at DAGScheduler.scala:1580
20:33:37.173 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 184 (MapPartitionsRDD[865] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.173 INFO  TaskSchedulerImpl - Adding task set 184.0 with 1 tasks resource profile 0
20:33:37.173 INFO  TaskSetManager - Starting task 0.0 in stage 184.0 (TID 240) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:37.173 INFO  Executor - Running task 0.0 in stage 184.0 (TID 240)
20:33:37.203 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11999791897402549114.bam:0+237038
20:33:37.214 INFO  Executor - Finished task 0.0 in stage 184.0 (TID 240). 989 bytes result sent to driver
20:33:37.214 INFO  TaskSetManager - Finished task 0.0 in stage 184.0 (TID 240) in 41 ms on localhost (executor driver) (1/1)
20:33:37.214 INFO  TaskSchedulerImpl - Removed TaskSet 184.0, whose tasks have all completed, from pool 
20:33:37.214 INFO  DAGScheduler - ResultStage 184 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:37.214 INFO  DAGScheduler - Job 136 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.215 INFO  TaskSchedulerImpl - Killing all running tasks in stage 184: Stage finished
20:33:37.215 INFO  DAGScheduler - Job 136 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.060612 s
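
The collect and count jobs above are the verification half of one write-and-read-back cycle: the test collects the reloaded reads (ReadsSparkSinkUnitTest.java:182) and then counts both the original input and the written output (ReadsSparkSinkUnitTest.java:185). A minimal sketch of that pattern follows; writeReads and readReads here are placeholders standing in for the ReadsSparkSink / ReadsSparkSource calls the test exercises, not their real GATK signatures.

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    import java.util.List;

    // Hypothetical sketch only: writeReads/readReads are stand-ins for the
    // ReadsSparkSink / ReadsSparkSource calls under test, not their real signatures.
    final class RoundTripSketch {
        static <T> void assertRoundTrip(JavaSparkContext ctx, JavaRDD<T> original, String output) {
            writeReads(ctx, original, output);            // write step ("runJob at SparkHadoopWriter")
            JavaRDD<T> reloaded = readReads(ctx, output); // read-back step

            List<T> reloadedReads = reloaded.collect();   // mirrors "collect at ReadsSparkSinkUnitTest.java:182"
            long expected = original.count();             // mirrors "count at ReadsSparkSinkUnitTest.java:185"
            long actual = reloaded.count();               // second count job in the log above

            if (actual != expected || reloadedReads.size() != expected) {
                throw new AssertionError("round trip changed read count: " + actual + " vs " + expected);
            }
        }

        // Stubs only; the real test writes/reads BAM, CRAM, or SAM via GATK's Spark sink/source.
        static <T> void writeReads(JavaSparkContext ctx, JavaRDD<T> reads, String path) {
            throw new UnsupportedOperationException("placeholder");
        }
        static <T> JavaRDD<T> readReads(JavaSparkContext ctx, String path) {
            throw new UnsupportedOperationException("placeholder");
        }
    }
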
20:33:37.222 INFO  MemoryStore - Block broadcast_361 stored as values in memory (estimated size 297.9 KiB, free 1917.3 MiB)
20:33:37.229 INFO  MemoryStore - Block broadcast_361_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.3 MiB)
20:33:37.229 INFO  BlockManagerInfo - Added broadcast_361_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:37.229 INFO  SparkContext - Created broadcast 361 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.250 INFO  MemoryStore - Block broadcast_362 stored as values in memory (estimated size 297.9 KiB, free 1917.0 MiB)
20:33:37.256 INFO  MemoryStore - Block broadcast_362_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.9 MiB)
20:33:37.256 INFO  BlockManagerInfo - Added broadcast_362_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:37.257 INFO  SparkContext - Created broadcast 362 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.276 INFO  FileInputFormat - Total input files to process : 1
20:33:37.278 INFO  MemoryStore - Block broadcast_363 stored as values in memory (estimated size 160.7 KiB, free 1916.8 MiB)
20:33:37.279 INFO  MemoryStore - Block broadcast_363_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.8 MiB)
20:33:37.279 INFO  BlockManagerInfo - Added broadcast_363_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:37.279 INFO  SparkContext - Created broadcast 363 from broadcast at ReadsSparkSink.java:133
20:33:37.279 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:37.280 INFO  MemoryStore - Block broadcast_364 stored as values in memory (estimated size 163.2 KiB, free 1916.6 MiB)
20:33:37.281 INFO  MemoryStore - Block broadcast_364_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.6 MiB)
20:33:37.281 INFO  BlockManagerInfo - Added broadcast_364_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:37.281 INFO  SparkContext - Created broadcast 364 from broadcast at BamSink.java:76
20:33:37.283 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.283 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.283 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
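
The three committer lines above show the write falling back to the default FileOutputCommitter with algorithm version 1 and cleanup of _temporary folders enabled. Purely as a hedged illustration (not something this test configures), the algorithm version is controlled by a standard Hadoop property:

    import org.apache.hadoop.conf.Configuration;

    final class CommitterConfigSketch {
        static Configuration withV2Committer() {
            Configuration conf = new Configuration();
            // Standard Hadoop property controlling the FileOutputCommitter algorithm:
            // version 1 (the default logged above) commits task output in two rename steps,
            // version 2 renames task output directly into the destination directory.
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 2);
            return conf;
        }
    }
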
20:33:37.299 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:37.300 INFO  DAGScheduler - Registering RDD 879 (mapToPair at SparkUtils.java:161) as input to shuffle 37
20:33:37.300 INFO  DAGScheduler - Got job 137 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:37.300 INFO  DAGScheduler - Final stage: ResultStage 186 (runJob at SparkHadoopWriter.scala:83)
20:33:37.300 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 185)
20:33:37.300 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 185)
20:33:37.300 INFO  DAGScheduler - Submitting ShuffleMapStage 185 (MapPartitionsRDD[879] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:37.318 INFO  MemoryStore - Block broadcast_365 stored as values in memory (estimated size 520.4 KiB, free 1916.1 MiB)
20:33:37.319 INFO  MemoryStore - Block broadcast_365_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1915.9 MiB)
20:33:37.319 INFO  BlockManagerInfo - Added broadcast_365_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.2 MiB)
20:33:37.319 INFO  SparkContext - Created broadcast 365 from broadcast at DAGScheduler.scala:1580
20:33:37.319 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 185 (MapPartitionsRDD[879] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:37.319 INFO  TaskSchedulerImpl - Adding task set 185.0 with 1 tasks resource profile 0
20:33:37.320 INFO  TaskSetManager - Starting task 0.0 in stage 185.0 (TID 241) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:37.320 INFO  Executor - Running task 0.0 in stage 185.0 (TID 241)
20:33:37.351 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:37.366 INFO  Executor - Finished task 0.0 in stage 185.0 (TID 241). 1148 bytes result sent to driver
20:33:37.366 INFO  TaskSetManager - Finished task 0.0 in stage 185.0 (TID 241) in 46 ms on localhost (executor driver) (1/1)
20:33:37.366 INFO  TaskSchedulerImpl - Removed TaskSet 185.0, whose tasks have all completed, from pool 
20:33:37.366 INFO  DAGScheduler - ShuffleMapStage 185 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:37.366 INFO  DAGScheduler - looking for newly runnable stages
20:33:37.366 INFO  DAGScheduler - running: HashSet()
20:33:37.366 INFO  DAGScheduler - waiting: HashSet(ResultStage 186)
20:33:37.366 INFO  DAGScheduler - failed: HashSet()
20:33:37.367 INFO  DAGScheduler - Submitting ResultStage 186 (MapPartitionsRDD[884] at mapToPair at BamSink.java:91), which has no missing parents
20:33:37.373 INFO  MemoryStore - Block broadcast_366 stored as values in memory (estimated size 241.4 KiB, free 1915.7 MiB)
20:33:37.374 INFO  MemoryStore - Block broadcast_366_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1915.6 MiB)
20:33:37.374 INFO  BlockManagerInfo - Added broadcast_366_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.1 MiB)
20:33:37.374 INFO  SparkContext - Created broadcast 366 from broadcast at DAGScheduler.scala:1580
20:33:37.374 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 186 (MapPartitionsRDD[884] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:37.374 INFO  TaskSchedulerImpl - Adding task set 186.0 with 1 tasks resource profile 0
20:33:37.375 INFO  TaskSetManager - Starting task 0.0 in stage 186.0 (TID 242) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:37.375 INFO  Executor - Running task 0.0 in stage 186.0 (TID 242)
20:33:37.379 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:37.379 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:37.390 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.390 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.390 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:37.390 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.390 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.390 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:37.408 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033377684284940822858143_0884_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1.someOtherPlace8575376303214744561/_temporary/0/task_202507152033377684284940822858143_0884_r_000000
20:33:37.408 INFO  SparkHadoopMapRedUtil - attempt_202507152033377684284940822858143_0884_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:37.409 INFO  Executor - Finished task 0.0 in stage 186.0 (TID 242). 1858 bytes result sent to driver
20:33:37.409 INFO  TaskSetManager - Finished task 0.0 in stage 186.0 (TID 242) in 34 ms on localhost (executor driver) (1/1)
20:33:37.409 INFO  TaskSchedulerImpl - Removed TaskSet 186.0, whose tasks have all completed, from pool 
20:33:37.410 INFO  DAGScheduler - ResultStage 186 (runJob at SparkHadoopWriter.scala:83) finished in 0.043 s
20:33:37.410 INFO  DAGScheduler - Job 137 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.410 INFO  TaskSchedulerImpl - Killing all running tasks in stage 186: Stage finished
20:33:37.410 INFO  DAGScheduler - Job 137 finished: runJob at SparkHadoopWriter.scala:83, took 0.110707 s
20:33:37.410 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033377684284940822858143_0884.
20:33:37.414 INFO  SparkHadoopWriter - Write Job job_202507152033377684284940822858143_0884 committed. Elapsed time: 4 ms.
20:33:37.426 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest11196356205367963658.bam
20:33:37.430 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest11196356205367963658.bam done
20:33:37.430 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest1.someOtherPlace8575376303214744561 to /tmp/ReadsSparkSinkUnitTest11196356205367963658.bam.sbi
20:33:37.435 INFO  IndexFileMerger - Done merging .sbi files
20:33:37.436 INFO  MemoryStore - Block broadcast_367 stored as values in memory (estimated size 320.0 B, free 1915.6 MiB)
20:33:37.436 INFO  MemoryStore - Block broadcast_367_piece0 stored as bytes in memory (estimated size 233.0 B, free 1915.6 MiB)
20:33:37.437 INFO  BlockManagerInfo - Added broadcast_367_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.1 MiB)
20:33:37.437 INFO  SparkContext - Created broadcast 367 from broadcast at BamSource.java:104
20:33:37.438 INFO  MemoryStore - Block broadcast_368 stored as values in memory (estimated size 297.9 KiB, free 1915.3 MiB)
20:33:37.444 INFO  MemoryStore - Block broadcast_368_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.3 MiB)
20:33:37.444 INFO  BlockManagerInfo - Added broadcast_368_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.1 MiB)
20:33:37.444 INFO  SparkContext - Created broadcast 368 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.453 INFO  FileInputFormat - Total input files to process : 1
20:33:37.469 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:37.469 INFO  DAGScheduler - Got job 138 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:37.469 INFO  DAGScheduler - Final stage: ResultStage 187 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:37.469 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.469 INFO  DAGScheduler - Missing parents: List()
20:33:37.469 INFO  DAGScheduler - Submitting ResultStage 187 (MapPartitionsRDD[890] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.475 INFO  MemoryStore - Block broadcast_369 stored as values in memory (estimated size 148.2 KiB, free 1915.2 MiB)
20:33:37.480 INFO  BlockManagerInfo - Removed broadcast_359_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.2 MiB)
20:33:37.480 INFO  MemoryStore - Block broadcast_369_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1915.7 MiB)
20:33:37.480 INFO  BlockManagerInfo - Added broadcast_369_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.2 MiB)
20:33:37.480 INFO  BlockManagerInfo - Removed broadcast_357_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:37.480 INFO  SparkContext - Created broadcast 369 from broadcast at DAGScheduler.scala:1580
20:33:37.480 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 187 (MapPartitionsRDD[890] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.480 INFO  TaskSchedulerImpl - Adding task set 187.0 with 1 tasks resource profile 0
20:33:37.481 INFO  BlockManagerInfo - Removed broadcast_358_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:37.481 INFO  BlockManagerInfo - Removed broadcast_363_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:37.481 INFO  TaskSetManager - Starting task 0.0 in stage 187.0 (TID 243) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:37.481 INFO  BlockManagerInfo - Removed broadcast_362_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:37.481 INFO  Executor - Running task 0.0 in stage 187.0 (TID 243)
20:33:37.482 INFO  BlockManagerInfo - Removed broadcast_366_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.5 MiB)
20:33:37.482 INFO  BlockManagerInfo - Removed broadcast_351_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:37.483 INFO  BlockManagerInfo - Removed broadcast_365_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.7 MiB)
20:33:37.483 INFO  BlockManagerInfo - Removed broadcast_364_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:37.484 INFO  BlockManagerInfo - Removed broadcast_360_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:37.494 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11196356205367963658.bam:0+237038
20:33:37.498 INFO  Executor - Finished task 0.0 in stage 187.0 (TID 243). 651526 bytes result sent to driver
20:33:37.501 INFO  TaskSetManager - Finished task 0.0 in stage 187.0 (TID 243) in 20 ms on localhost (executor driver) (1/1)
20:33:37.501 INFO  TaskSchedulerImpl - Removed TaskSet 187.0, whose tasks have all completed, from pool 
20:33:37.501 INFO  DAGScheduler - ResultStage 187 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.031 s
20:33:37.501 INFO  DAGScheduler - Job 138 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.501 INFO  TaskSchedulerImpl - Killing all running tasks in stage 187: Stage finished
20:33:37.501 INFO  DAGScheduler - Job 138 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.032291 s
20:33:37.511 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:37.511 INFO  DAGScheduler - Got job 139 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:37.511 INFO  DAGScheduler - Final stage: ResultStage 188 (count at ReadsSparkSinkUnitTest.java:185)
20:33:37.511 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.511 INFO  DAGScheduler - Missing parents: List()
20:33:37.511 INFO  DAGScheduler - Submitting ResultStage 188 (MapPartitionsRDD[872] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.528 INFO  MemoryStore - Block broadcast_370 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:37.529 INFO  MemoryStore - Block broadcast_370_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:37.529 INFO  BlockManagerInfo - Added broadcast_370_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:37.529 INFO  SparkContext - Created broadcast 370 from broadcast at DAGScheduler.scala:1580
20:33:37.530 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 188 (MapPartitionsRDD[872] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.530 INFO  TaskSchedulerImpl - Adding task set 188.0 with 1 tasks resource profile 0
20:33:37.530 INFO  TaskSetManager - Starting task 0.0 in stage 188.0 (TID 244) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:37.530 INFO  Executor - Running task 0.0 in stage 188.0 (TID 244)
20:33:37.560 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:37.569 INFO  Executor - Finished task 0.0 in stage 188.0 (TID 244). 989 bytes result sent to driver
20:33:37.570 INFO  TaskSetManager - Finished task 0.0 in stage 188.0 (TID 244) in 39 ms on localhost (executor driver) (1/1)
20:33:37.570 INFO  TaskSchedulerImpl - Removed TaskSet 188.0, whose tasks have all completed, from pool 
20:33:37.570 INFO  DAGScheduler - ResultStage 188 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:37.570 INFO  DAGScheduler - Job 139 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.570 INFO  TaskSchedulerImpl - Killing all running tasks in stage 188: Stage finished
20:33:37.570 INFO  DAGScheduler - Job 139 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.059022 s
20:33:37.573 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:37.573 INFO  DAGScheduler - Got job 140 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:37.573 INFO  DAGScheduler - Final stage: ResultStage 189 (count at ReadsSparkSinkUnitTest.java:185)
20:33:37.573 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.573 INFO  DAGScheduler - Missing parents: List()
20:33:37.574 INFO  DAGScheduler - Submitting ResultStage 189 (MapPartitionsRDD[890] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.580 INFO  MemoryStore - Block broadcast_371 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:37.581 INFO  MemoryStore - Block broadcast_371_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.4 MiB)
20:33:37.581 INFO  BlockManagerInfo - Added broadcast_371_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:37.582 INFO  SparkContext - Created broadcast 371 from broadcast at DAGScheduler.scala:1580
20:33:37.582 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 189 (MapPartitionsRDD[890] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.582 INFO  TaskSchedulerImpl - Adding task set 189.0 with 1 tasks resource profile 0
20:33:37.582 INFO  TaskSetManager - Starting task 0.0 in stage 189.0 (TID 245) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:37.582 INFO  Executor - Running task 0.0 in stage 189.0 (TID 245)
20:33:37.598 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest11196356205367963658.bam:0+237038
20:33:37.601 INFO  Executor - Finished task 0.0 in stage 189.0 (TID 245). 989 bytes result sent to driver
20:33:37.601 INFO  TaskSetManager - Finished task 0.0 in stage 189.0 (TID 245) in 19 ms on localhost (executor driver) (1/1)
20:33:37.601 INFO  TaskSchedulerImpl - Removed TaskSet 189.0, whose tasks have all completed, from pool 
20:33:37.601 INFO  DAGScheduler - ResultStage 189 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.027 s
20:33:37.602 INFO  DAGScheduler - Job 140 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.602 INFO  TaskSchedulerImpl - Killing all running tasks in stage 189: Stage finished
20:33:37.602 INFO  DAGScheduler - Job 140 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.028424 s
20:33:37.610 INFO  MemoryStore - Block broadcast_372 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:37.616 INFO  MemoryStore - Block broadcast_372_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:37.617 INFO  BlockManagerInfo - Added broadcast_372_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:37.617 INFO  SparkContext - Created broadcast 372 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.639 INFO  MemoryStore - Block broadcast_373 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:37.649 INFO  MemoryStore - Block broadcast_373_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:37.649 INFO  BlockManagerInfo - Added broadcast_373_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:37.649 INFO  SparkContext - Created broadcast 373 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.674 INFO  FileInputFormat - Total input files to process : 1
20:33:37.676 INFO  MemoryStore - Block broadcast_374 stored as values in memory (estimated size 160.7 KiB, free 1917.5 MiB)
20:33:37.677 INFO  MemoryStore - Block broadcast_374_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:37.677 INFO  BlockManagerInfo - Added broadcast_374_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:37.677 INFO  SparkContext - Created broadcast 374 from broadcast at ReadsSparkSink.java:133
20:33:37.678 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:37.678 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:37.678 INFO  MemoryStore - Block broadcast_375 stored as values in memory (estimated size 163.2 KiB, free 1917.4 MiB)
20:33:37.679 INFO  MemoryStore - Block broadcast_375_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:37.679 INFO  BlockManagerInfo - Added broadcast_375_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:37.679 INFO  SparkContext - Created broadcast 375 from broadcast at BamSink.java:76
20:33:37.681 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.681 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.681 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:37.698 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:37.699 INFO  DAGScheduler - Registering RDD 904 (mapToPair at SparkUtils.java:161) as input to shuffle 38
20:33:37.699 INFO  DAGScheduler - Got job 141 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:37.699 INFO  DAGScheduler - Final stage: ResultStage 191 (runJob at SparkHadoopWriter.scala:83)
20:33:37.699 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 190)
20:33:37.699 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 190)
20:33:37.699 INFO  DAGScheduler - Submitting ShuffleMapStage 190 (MapPartitionsRDD[904] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:37.716 INFO  MemoryStore - Block broadcast_376 stored as values in memory (estimated size 520.4 KiB, free 1916.8 MiB)
20:33:37.718 INFO  MemoryStore - Block broadcast_376_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.7 MiB)
20:33:37.718 INFO  BlockManagerInfo - Added broadcast_376_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:37.718 INFO  SparkContext - Created broadcast 376 from broadcast at DAGScheduler.scala:1580
20:33:37.718 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 190 (MapPartitionsRDD[904] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:37.718 INFO  TaskSchedulerImpl - Adding task set 190.0 with 1 tasks resource profile 0
20:33:37.719 INFO  TaskSetManager - Starting task 0.0 in stage 190.0 (TID 246) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:37.719 INFO  Executor - Running task 0.0 in stage 190.0 (TID 246)
20:33:37.753 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:37.768 INFO  Executor - Finished task 0.0 in stage 190.0 (TID 246). 1148 bytes result sent to driver
20:33:37.768 INFO  TaskSetManager - Finished task 0.0 in stage 190.0 (TID 246) in 49 ms on localhost (executor driver) (1/1)
20:33:37.768 INFO  TaskSchedulerImpl - Removed TaskSet 190.0, whose tasks have all completed, from pool 
20:33:37.768 INFO  DAGScheduler - ShuffleMapStage 190 (mapToPair at SparkUtils.java:161) finished in 0.069 s
20:33:37.768 INFO  DAGScheduler - looking for newly runnable stages
20:33:37.768 INFO  DAGScheduler - running: HashSet()
20:33:37.768 INFO  DAGScheduler - waiting: HashSet(ResultStage 191)
20:33:37.768 INFO  DAGScheduler - failed: HashSet()
20:33:37.768 INFO  DAGScheduler - Submitting ResultStage 191 (MapPartitionsRDD[909] at mapToPair at BamSink.java:91), which has no missing parents
20:33:37.775 INFO  MemoryStore - Block broadcast_377 stored as values in memory (estimated size 241.4 KiB, free 1916.4 MiB)
20:33:37.776 INFO  MemoryStore - Block broadcast_377_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1916.4 MiB)
20:33:37.776 INFO  BlockManagerInfo - Added broadcast_377_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.3 MiB)
20:33:37.776 INFO  SparkContext - Created broadcast 377 from broadcast at DAGScheduler.scala:1580
20:33:37.776 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 191 (MapPartitionsRDD[909] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:37.776 INFO  TaskSchedulerImpl - Adding task set 191.0 with 1 tasks resource profile 0
20:33:37.777 INFO  TaskSetManager - Starting task 0.0 in stage 191.0 (TID 247) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:37.777 INFO  Executor - Running task 0.0 in stage 191.0 (TID 247)
20:33:37.782 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:37.782 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:37.793 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.793 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.793 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:37.793 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:37.793 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:37.793 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:37.808 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033378271265143735859612_0909_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest1.someOtherPlace407540006950527653/_temporary/0/task_202507152033378271265143735859612_0909_r_000000
20:33:37.808 INFO  SparkHadoopMapRedUtil - attempt_202507152033378271265143735859612_0909_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:37.808 INFO  Executor - Finished task 0.0 in stage 191.0 (TID 247). 1858 bytes result sent to driver
20:33:37.809 INFO  TaskSetManager - Finished task 0.0 in stage 191.0 (TID 247) in 32 ms on localhost (executor driver) (1/1)
20:33:37.809 INFO  TaskSchedulerImpl - Removed TaskSet 191.0, whose tasks have all completed, from pool 
20:33:37.809 INFO  DAGScheduler - ResultStage 191 (runJob at SparkHadoopWriter.scala:83) finished in 0.040 s
20:33:37.809 INFO  DAGScheduler - Job 141 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.809 INFO  TaskSchedulerImpl - Killing all running tasks in stage 191: Stage finished
20:33:37.809 INFO  DAGScheduler - Job 141 finished: runJob at SparkHadoopWriter.scala:83, took 0.110773 s
20:33:37.809 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033378271265143735859612_0909.
20:33:37.814 INFO  SparkHadoopWriter - Write Job job_202507152033378271265143735859612_0909 committed. Elapsed time: 4 ms.
20:33:37.825 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest14064257591437578204.bam
20:33:37.829 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest14064257591437578204.bam done
20:33:37.831 INFO  MemoryStore - Block broadcast_378 stored as values in memory (estimated size 297.9 KiB, free 1916.1 MiB)
20:33:37.837 INFO  MemoryStore - Block broadcast_378_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.0 MiB)
20:33:37.837 INFO  BlockManagerInfo - Added broadcast_378_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:37.838 INFO  SparkContext - Created broadcast 378 from newAPIHadoopFile at PathSplitSource.java:96
20:33:37.857 INFO  FileInputFormat - Total input files to process : 1
20:33:37.892 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:37.892 INFO  DAGScheduler - Got job 142 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:37.892 INFO  DAGScheduler - Final stage: ResultStage 192 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:37.892 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.892 INFO  DAGScheduler - Missing parents: List()
20:33:37.892 INFO  DAGScheduler - Submitting ResultStage 192 (MapPartitionsRDD[916] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.909 INFO  MemoryStore - Block broadcast_379 stored as values in memory (estimated size 426.2 KiB, free 1915.6 MiB)
20:33:37.910 INFO  MemoryStore - Block broadcast_379_piece0 stored as bytes in memory (estimated size 153.7 KiB, free 1915.5 MiB)
20:33:37.910 INFO  BlockManagerInfo - Added broadcast_379_piece0 in memory on localhost:45281 (size: 153.7 KiB, free: 1919.1 MiB)
20:33:37.911 INFO  SparkContext - Created broadcast 379 from broadcast at DAGScheduler.scala:1580
20:33:37.911 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 192 (MapPartitionsRDD[916] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.911 INFO  TaskSchedulerImpl - Adding task set 192.0 with 1 tasks resource profile 0
20:33:37.911 INFO  TaskSetManager - Starting task 0.0 in stage 192.0 (TID 248) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:37.911 INFO  Executor - Running task 0.0 in stage 192.0 (TID 248)
20:33:37.941 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest14064257591437578204.bam:0+237038
20:33:37.956 INFO  BlockManagerInfo - Removed broadcast_367_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.1 MiB)
20:33:37.956 INFO  BlockManagerInfo - Removed broadcast_371_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.2 MiB)
20:33:37.956 INFO  Executor - Finished task 0.0 in stage 192.0 (TID 248). 651569 bytes result sent to driver
20:33:37.957 INFO  BlockManagerInfo - Removed broadcast_368_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.2 MiB)
20:33:37.957 INFO  BlockManagerInfo - Removed broadcast_370_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:37.958 INFO  BlockManagerInfo - Removed broadcast_369_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.4 MiB)
20:33:37.958 INFO  BlockManagerInfo - Removed broadcast_373_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:37.959 INFO  BlockManagerInfo - Removed broadcast_374_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:37.959 INFO  TaskSetManager - Finished task 0.0 in stage 192.0 (TID 248) in 48 ms on localhost (executor driver) (1/1)
20:33:37.959 INFO  TaskSchedulerImpl - Removed TaskSet 192.0, whose tasks have all completed, from pool 
20:33:37.959 INFO  DAGScheduler - ResultStage 192 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.067 s
20:33:37.959 INFO  DAGScheduler - Job 142 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:37.959 INFO  TaskSchedulerImpl - Killing all running tasks in stage 192: Stage finished
20:33:37.959 INFO  DAGScheduler - Job 142 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.067465 s
20:33:37.960 INFO  BlockManagerInfo - Removed broadcast_376_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.6 MiB)
20:33:37.960 INFO  BlockManagerInfo - Removed broadcast_377_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.7 MiB)
20:33:37.960 INFO  BlockManagerInfo - Removed broadcast_361_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:37.961 INFO  BlockManagerInfo - Removed broadcast_375_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:37.975 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:37.975 INFO  DAGScheduler - Got job 143 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:37.975 INFO  DAGScheduler - Final stage: ResultStage 193 (count at ReadsSparkSinkUnitTest.java:185)
20:33:37.975 INFO  DAGScheduler - Parents of final stage: List()
20:33:37.975 INFO  DAGScheduler - Missing parents: List()
20:33:37.975 INFO  DAGScheduler - Submitting ResultStage 193 (MapPartitionsRDD[897] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:37.992 INFO  MemoryStore - Block broadcast_380 stored as values in memory (estimated size 426.1 KiB, free 1918.3 MiB)
20:33:37.993 INFO  MemoryStore - Block broadcast_380_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.2 MiB)
20:33:37.993 INFO  BlockManagerInfo - Added broadcast_380_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:37.993 INFO  SparkContext - Created broadcast 380 from broadcast at DAGScheduler.scala:1580
20:33:37.994 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 193 (MapPartitionsRDD[897] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:37.994 INFO  TaskSchedulerImpl - Adding task set 193.0 with 1 tasks resource profile 0
20:33:37.994 INFO  TaskSetManager - Starting task 0.0 in stage 193.0 (TID 249) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:37.994 INFO  Executor - Running task 0.0 in stage 193.0 (TID 249)
20:33:38.025 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:38.034 INFO  Executor - Finished task 0.0 in stage 193.0 (TID 249). 989 bytes result sent to driver
20:33:38.035 INFO  TaskSetManager - Finished task 0.0 in stage 193.0 (TID 249) in 41 ms on localhost (executor driver) (1/1)
20:33:38.035 INFO  TaskSchedulerImpl - Removed TaskSet 193.0, whose tasks have all completed, from pool 
20:33:38.035 INFO  DAGScheduler - ResultStage 193 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.060 s
20:33:38.035 INFO  DAGScheduler - Job 143 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.036 INFO  TaskSchedulerImpl - Killing all running tasks in stage 193: Stage finished
20:33:38.036 INFO  DAGScheduler - Job 143 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061370 s
20:33:38.040 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:38.041 INFO  DAGScheduler - Got job 144 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:38.041 INFO  DAGScheduler - Final stage: ResultStage 194 (count at ReadsSparkSinkUnitTest.java:185)
20:33:38.041 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.041 INFO  DAGScheduler - Missing parents: List()
20:33:38.041 INFO  DAGScheduler - Submitting ResultStage 194 (MapPartitionsRDD[916] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.058 INFO  MemoryStore - Block broadcast_381 stored as values in memory (estimated size 426.1 KiB, free 1917.8 MiB)
20:33:38.059 INFO  MemoryStore - Block broadcast_381_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.6 MiB)
20:33:38.059 INFO  BlockManagerInfo - Added broadcast_381_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:38.059 INFO  SparkContext - Created broadcast 381 from broadcast at DAGScheduler.scala:1580
20:33:38.060 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 194 (MapPartitionsRDD[916] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.060 INFO  TaskSchedulerImpl - Adding task set 194.0 with 1 tasks resource profile 0
20:33:38.060 INFO  TaskSetManager - Starting task 0.0 in stage 194.0 (TID 250) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:38.061 INFO  Executor - Running task 0.0 in stage 194.0 (TID 250)
20:33:38.090 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest14064257591437578204.bam:0+237038
20:33:38.101 INFO  Executor - Finished task 0.0 in stage 194.0 (TID 250). 989 bytes result sent to driver
20:33:38.101 INFO  TaskSetManager - Finished task 0.0 in stage 194.0 (TID 250) in 41 ms on localhost (executor driver) (1/1)
20:33:38.101 INFO  TaskSchedulerImpl - Removed TaskSet 194.0, whose tasks have all completed, from pool 
20:33:38.101 INFO  DAGScheduler - ResultStage 194 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.060 s
20:33:38.102 INFO  DAGScheduler - Job 144 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.102 INFO  TaskSchedulerImpl - Killing all running tasks in stage 194: Stage finished
20:33:38.102 INFO  DAGScheduler - Job 144 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061353 s
20:33:38.110 INFO  MemoryStore - Block broadcast_382 stored as values in memory (estimated size 298.0 KiB, free 1917.3 MiB)
20:33:38.118 INFO  MemoryStore - Block broadcast_382_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1917.3 MiB)
20:33:38.118 INFO  BlockManagerInfo - Added broadcast_382_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.4 MiB)
20:33:38.118 INFO  SparkContext - Created broadcast 382 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.140 INFO  MemoryStore - Block broadcast_383 stored as values in memory (estimated size 298.0 KiB, free 1917.0 MiB)
20:33:38.147 INFO  MemoryStore - Block broadcast_383_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1916.9 MiB)
20:33:38.147 INFO  BlockManagerInfo - Added broadcast_383_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.4 MiB)
20:33:38.147 INFO  SparkContext - Created broadcast 383 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.167 INFO  FileInputFormat - Total input files to process : 1
20:33:38.168 INFO  MemoryStore - Block broadcast_384 stored as values in memory (estimated size 160.7 KiB, free 1916.8 MiB)
20:33:38.169 INFO  MemoryStore - Block broadcast_384_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.8 MiB)
20:33:38.169 INFO  BlockManagerInfo - Added broadcast_384_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:38.170 INFO  SparkContext - Created broadcast 384 from broadcast at ReadsSparkSink.java:133
20:33:38.171 INFO  MemoryStore - Block broadcast_385 stored as values in memory (estimated size 163.2 KiB, free 1916.6 MiB)
20:33:38.171 INFO  MemoryStore - Block broadcast_385_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.6 MiB)
20:33:38.171 INFO  BlockManagerInfo - Added broadcast_385_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:38.172 INFO  SparkContext - Created broadcast 385 from broadcast at BamSink.java:76
20:33:38.173 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.173 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.173 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.190 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:38.190 INFO  DAGScheduler - Registering RDD 930 (mapToPair at SparkUtils.java:161) as input to shuffle 39
20:33:38.191 INFO  DAGScheduler - Got job 145 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:38.191 INFO  DAGScheduler - Final stage: ResultStage 196 (runJob at SparkHadoopWriter.scala:83)
20:33:38.191 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 195)
20:33:38.191 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 195)
20:33:38.191 INFO  DAGScheduler - Submitting ShuffleMapStage 195 (MapPartitionsRDD[930] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:38.208 INFO  MemoryStore - Block broadcast_386 stored as values in memory (estimated size 520.4 KiB, free 1916.1 MiB)
20:33:38.209 INFO  MemoryStore - Block broadcast_386_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1915.9 MiB)
20:33:38.209 INFO  BlockManagerInfo - Added broadcast_386_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.2 MiB)
20:33:38.210 INFO  SparkContext - Created broadcast 386 from broadcast at DAGScheduler.scala:1580
20:33:38.210 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 195 (MapPartitionsRDD[930] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:38.210 INFO  TaskSchedulerImpl - Adding task set 195.0 with 1 tasks resource profile 0
20:33:38.210 INFO  TaskSetManager - Starting task 0.0 in stage 195.0 (TID 251) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
20:33:38.211 INFO  Executor - Running task 0.0 in stage 195.0 (TID 251)
20:33:38.246 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:38.267 INFO  Executor - Finished task 0.0 in stage 195.0 (TID 251). 1148 bytes result sent to driver
20:33:38.267 INFO  TaskSetManager - Finished task 0.0 in stage 195.0 (TID 251) in 57 ms on localhost (executor driver) (1/1)
20:33:38.267 INFO  TaskSchedulerImpl - Removed TaskSet 195.0, whose tasks have all completed, from pool 
20:33:38.267 INFO  DAGScheduler - ShuffleMapStage 195 (mapToPair at SparkUtils.java:161) finished in 0.076 s
20:33:38.267 INFO  DAGScheduler - looking for newly runnable stages
20:33:38.267 INFO  DAGScheduler - running: HashSet()
20:33:38.267 INFO  DAGScheduler - waiting: HashSet(ResultStage 196)
20:33:38.267 INFO  DAGScheduler - failed: HashSet()
20:33:38.267 INFO  DAGScheduler - Submitting ResultStage 196 (MapPartitionsRDD[935] at mapToPair at BamSink.java:91), which has no missing parents
20:33:38.274 INFO  MemoryStore - Block broadcast_387 stored as values in memory (estimated size 241.4 KiB, free 1915.7 MiB)
20:33:38.275 INFO  MemoryStore - Block broadcast_387_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1915.6 MiB)
20:33:38.275 INFO  BlockManagerInfo - Added broadcast_387_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.1 MiB)
20:33:38.275 INFO  SparkContext - Created broadcast 387 from broadcast at DAGScheduler.scala:1580
20:33:38.275 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 196 (MapPartitionsRDD[935] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:38.276 INFO  TaskSchedulerImpl - Adding task set 196.0 with 1 tasks resource profile 0
20:33:38.276 INFO  TaskSetManager - Starting task 0.0 in stage 196.0 (TID 252) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:38.276 INFO  Executor - Running task 0.0 in stage 196.0 (TID 252)
20:33:38.283 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:38.283 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:38.299 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.299 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.299 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.299 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.299 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.299 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.323 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033382793751476903614701_0935_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest2.someOtherPlace10850635971467721135/_temporary/0/task_202507152033382793751476903614701_0935_r_000000
20:33:38.323 INFO  SparkHadoopMapRedUtil - attempt_202507152033382793751476903614701_0935_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:38.324 INFO  Executor - Finished task 0.0 in stage 196.0 (TID 252). 1858 bytes result sent to driver
20:33:38.324 INFO  TaskSetManager - Finished task 0.0 in stage 196.0 (TID 252) in 48 ms on localhost (executor driver) (1/1)
20:33:38.324 INFO  TaskSchedulerImpl - Removed TaskSet 196.0, whose tasks have all completed, from pool 
20:33:38.324 INFO  DAGScheduler - ResultStage 196 (runJob at SparkHadoopWriter.scala:83) finished in 0.056 s
20:33:38.324 INFO  DAGScheduler - Job 145 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.324 INFO  TaskSchedulerImpl - Killing all running tasks in stage 196: Stage finished
20:33:38.325 INFO  DAGScheduler - Job 145 finished: runJob at SparkHadoopWriter.scala:83, took 0.134643 s
20:33:38.325 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033382793751476903614701_0935.
20:33:38.330 INFO  SparkHadoopWriter - Write Job job_202507152033382793751476903614701_0935 committed. Elapsed time: 4 ms.
20:33:38.341 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest215279200674269542743.bam
20:33:38.346 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest215279200674269542743.bam done
20:33:38.346 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest2.someOtherPlace10850635971467721135 to /tmp/ReadsSparkSinkUnitTest215279200674269542743.bam.sbi
20:33:38.350 INFO  IndexFileMerger - Done merging .sbi files
20:33:38.350 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest2.someOtherPlace10850635971467721135 to /tmp/ReadsSparkSinkUnitTest215279200674269542743.bam.bai
20:33:38.356 INFO  IndexFileMerger - Done merging .bai files
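
After concatenation, the merged .sbi and .bai files reported above sit as sidecars next to the output BAM. A minimal sketch of checking for them, using plain java.nio paths (illustrative only, not the test's actual assertions):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    final class IndexSidecarCheck {
        // Verifies the concatenated BAM and the merged .sbi/.bai sidecars exist on disk.
        static void assertIndexesPresent(String bamPath) {
            Path bam = Paths.get(bamPath);
            Path sbi = Paths.get(bamPath + ".sbi"); // splitting index merged by IndexFileMerger
            Path bai = Paths.get(bamPath + ".bai"); // BAM index merged by IndexFileMerger
            if (!Files.exists(bam) || !Files.exists(sbi) || !Files.exists(bai)) {
                throw new AssertionError("missing output or merged index for " + bamPath);
            }
        }
    }
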
20:33:38.358 INFO  MemoryStore - Block broadcast_388 stored as values in memory (estimated size 320.0 B, free 1915.6 MiB)
20:33:38.358 INFO  MemoryStore - Block broadcast_388_piece0 stored as bytes in memory (estimated size 233.0 B, free 1915.6 MiB)
20:33:38.358 INFO  BlockManagerInfo - Added broadcast_388_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.1 MiB)
20:33:38.359 INFO  SparkContext - Created broadcast 388 from broadcast at BamSource.java:104
20:33:38.360 INFO  MemoryStore - Block broadcast_389 stored as values in memory (estimated size 297.9 KiB, free 1915.3 MiB)
20:33:38.366 INFO  MemoryStore - Block broadcast_389_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.3 MiB)
20:33:38.366 INFO  BlockManagerInfo - Added broadcast_389_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.1 MiB)
20:33:38.366 INFO  SparkContext - Created broadcast 389 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.374 INFO  FileInputFormat - Total input files to process : 1
20:33:38.389 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:38.389 INFO  DAGScheduler - Got job 146 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:38.389 INFO  DAGScheduler - Final stage: ResultStage 197 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:38.390 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.390 INFO  DAGScheduler - Missing parents: List()
20:33:38.390 INFO  DAGScheduler - Submitting ResultStage 197 (MapPartitionsRDD[941] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.396 INFO  MemoryStore - Block broadcast_390 stored as values in memory (estimated size 148.2 KiB, free 1915.1 MiB)
20:33:38.397 INFO  MemoryStore - Block broadcast_390_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1915.1 MiB)
20:33:38.397 INFO  BlockManagerInfo - Added broadcast_390_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.0 MiB)
20:33:38.397 INFO  SparkContext - Created broadcast 390 from broadcast at DAGScheduler.scala:1580
20:33:38.397 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 197 (MapPartitionsRDD[941] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.397 INFO  TaskSchedulerImpl - Adding task set 197.0 with 1 tasks resource profile 0
20:33:38.398 INFO  TaskSetManager - Starting task 0.0 in stage 197.0 (TID 253) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:38.398 INFO  Executor - Running task 0.0 in stage 197.0 (TID 253)
20:33:38.409 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest215279200674269542743.bam:0+235514
20:33:38.418 INFO  BlockManagerInfo - Removed broadcast_386_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.2 MiB)
20:33:38.418 INFO  Executor - Finished task 0.0 in stage 197.0 (TID 253). 650227 bytes result sent to driver
20:33:38.418 INFO  BlockManagerInfo - Removed broadcast_387_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.2 MiB)
20:33:38.419 INFO  BlockManagerInfo - Removed broadcast_372_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:38.419 INFO  BlockManagerInfo - Removed broadcast_379_piece0 on localhost:45281 in memory (size: 153.7 KiB, free: 1919.4 MiB)
20:33:38.419 INFO  BlockManagerInfo - Removed broadcast_378_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:38.420 INFO  BlockManagerInfo - Removed broadcast_381_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.6 MiB)
20:33:38.420 INFO  TaskSetManager - Finished task 0.0 in stage 197.0 (TID 253) in 22 ms on localhost (executor driver) (1/1)
20:33:38.421 INFO  TaskSchedulerImpl - Removed TaskSet 197.0, whose tasks have all completed, from pool 
20:33:38.421 INFO  DAGScheduler - ResultStage 197 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.031 s
20:33:38.421 INFO  DAGScheduler - Job 146 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.421 INFO  TaskSchedulerImpl - Killing all running tasks in stage 197: Stage finished
20:33:38.421 INFO  DAGScheduler - Job 146 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.031575 s
20:33:38.422 INFO  BlockManagerInfo - Removed broadcast_383_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.7 MiB)
20:33:38.423 INFO  BlockManagerInfo - Removed broadcast_384_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:38.423 INFO  BlockManagerInfo - Removed broadcast_385_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:38.423 INFO  BlockManagerInfo - Removed broadcast_380_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:38.431 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:38.432 INFO  DAGScheduler - Got job 147 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:38.432 INFO  DAGScheduler - Final stage: ResultStage 198 (count at ReadsSparkSinkUnitTest.java:185)
20:33:38.432 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.432 INFO  DAGScheduler - Missing parents: List()
20:33:38.432 INFO  DAGScheduler - Submitting ResultStage 198 (MapPartitionsRDD[923] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.449 INFO  MemoryStore - Block broadcast_391 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:38.450 INFO  MemoryStore - Block broadcast_391_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:38.450 INFO  BlockManagerInfo - Added broadcast_391_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:38.450 INFO  SparkContext - Created broadcast 391 from broadcast at DAGScheduler.scala:1580
20:33:38.450 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 198 (MapPartitionsRDD[923] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.451 INFO  TaskSchedulerImpl - Adding task set 198.0 with 1 tasks resource profile 0
20:33:38.451 INFO  TaskSetManager - Starting task 0.0 in stage 198.0 (TID 254) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7912 bytes) 
20:33:38.451 INFO  Executor - Running task 0.0 in stage 198.0 (TID 254)
20:33:38.481 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:38.492 INFO  Executor - Finished task 0.0 in stage 198.0 (TID 254). 989 bytes result sent to driver
20:33:38.492 INFO  TaskSetManager - Finished task 0.0 in stage 198.0 (TID 254) in 41 ms on localhost (executor driver) (1/1)
20:33:38.492 INFO  TaskSchedulerImpl - Removed TaskSet 198.0, whose tasks have all completed, from pool 
20:33:38.492 INFO  DAGScheduler - ResultStage 198 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.060 s
20:33:38.492 INFO  DAGScheduler - Job 147 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.493 INFO  TaskSchedulerImpl - Killing all running tasks in stage 198: Stage finished
20:33:38.493 INFO  DAGScheduler - Job 147 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061229 s
20:33:38.496 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:38.496 INFO  DAGScheduler - Got job 148 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:38.496 INFO  DAGScheduler - Final stage: ResultStage 199 (count at ReadsSparkSinkUnitTest.java:185)
20:33:38.496 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.496 INFO  DAGScheduler - Missing parents: List()
20:33:38.496 INFO  DAGScheduler - Submitting ResultStage 199 (MapPartitionsRDD[941] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.502 INFO  MemoryStore - Block broadcast_392 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:38.503 INFO  MemoryStore - Block broadcast_392_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.4 MiB)
20:33:38.503 INFO  BlockManagerInfo - Added broadcast_392_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:38.503 INFO  SparkContext - Created broadcast 392 from broadcast at DAGScheduler.scala:1580
20:33:38.504 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 199 (MapPartitionsRDD[941] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.504 INFO  TaskSchedulerImpl - Adding task set 199.0 with 1 tasks resource profile 0
20:33:38.504 INFO  TaskSetManager - Starting task 0.0 in stage 199.0 (TID 255) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:38.504 INFO  Executor - Running task 0.0 in stage 199.0 (TID 255)
20:33:38.516 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest215279200674269542743.bam:0+235514
20:33:38.520 INFO  Executor - Finished task 0.0 in stage 199.0 (TID 255). 989 bytes result sent to driver
20:33:38.520 INFO  TaskSetManager - Finished task 0.0 in stage 199.0 (TID 255) in 16 ms on localhost (executor driver) (1/1)
20:33:38.520 INFO  TaskSchedulerImpl - Removed TaskSet 199.0, whose tasks have all completed, from pool 
20:33:38.520 INFO  DAGScheduler - ResultStage 199 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.024 s
20:33:38.520 INFO  DAGScheduler - Job 148 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.520 INFO  TaskSchedulerImpl - Killing all running tasks in stage 199: Stage finished
20:33:38.521 INFO  DAGScheduler - Job 148 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.024718 s
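Note: the three jobs above (collect at ReadsSparkSinkUnitTest.java:182 on the freshly written /tmp/ReadsSparkSinkUnitTest2*.bam, then count at ReadsSparkSinkUnitTest.java:185 on the original input and on the re-read output) are consistent with a write/read-back/compare check. A minimal sketch of that pattern is below; readReads/writeReads are hypothetical placeholders for the real GATK Spark source and sink, and only the Spark operations (collect, count) mirror the jobs in the log.

    // Minimal sketch of a write / read-back / compare check. readReads and writeReads are
    // hypothetical helpers standing in for the GATK source/sink; the real test goes through
    // BAM/CRAM/SAM codecs rather than text files.
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import java.util.List;

    public final class RoundTripCheckSketch {
        public static void check(JavaSparkContext ctx, String input, String output) {
            JavaRDD<String> original = readReads(ctx, input);   // hypothetical helper
            writeReads(original, output);                       // hypothetical helper

            JavaRDD<String> reread = readReads(ctx, output);
            List<String> rereadRecords = reread.collect();      // cf. "collect at ...:182"
            long originalCount = original.count();              // cf. "count at ...:185"
            long rereadCount = reread.count();                  // cf. "count at ...:185"

            if (originalCount != rereadCount || rereadCount != rereadRecords.size()) {
                throw new AssertionError("round trip changed the number of records");
            }
        }

        // Placeholders so the sketch compiles on Spark core alone.
        private static JavaRDD<String> readReads(JavaSparkContext ctx, String path) {
            return ctx.textFile(path);
        }
        private static void writeReads(JavaRDD<String> reads, String path) {
            reads.saveAsTextFile(path);
        }
    }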
20:33:38.529 INFO  MemoryStore - Block broadcast_393 stored as values in memory (estimated size 298.0 KiB, free 1918.1 MiB)
20:33:38.535 INFO  MemoryStore - Block broadcast_393_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:38.535 INFO  BlockManagerInfo - Added broadcast_393_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:38.536 INFO  SparkContext - Created broadcast 393 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.556 INFO  MemoryStore - Block broadcast_394 stored as values in memory (estimated size 298.0 KiB, free 1917.7 MiB)
20:33:38.562 INFO  MemoryStore - Block broadcast_394_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:38.562 INFO  BlockManagerInfo - Added broadcast_394_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:38.563 INFO  SparkContext - Created broadcast 394 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.582 INFO  FileInputFormat - Total input files to process : 1
20:33:38.583 INFO  MemoryStore - Block broadcast_395 stored as values in memory (estimated size 19.6 KiB, free 1917.7 MiB)
20:33:38.584 INFO  MemoryStore - Block broadcast_395_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1917.7 MiB)
20:33:38.584 INFO  BlockManagerInfo - Added broadcast_395_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.5 MiB)
20:33:38.584 INFO  SparkContext - Created broadcast 395 from broadcast at ReadsSparkSink.java:133
20:33:38.585 INFO  MemoryStore - Block broadcast_396 stored as values in memory (estimated size 20.0 KiB, free 1917.6 MiB)
20:33:38.585 INFO  MemoryStore - Block broadcast_396_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1917.6 MiB)
20:33:38.585 INFO  BlockManagerInfo - Added broadcast_396_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.5 MiB)
20:33:38.585 INFO  SparkContext - Created broadcast 396 from broadcast at BamSink.java:76
20:33:38.587 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.587 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.587 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
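Note: the three committer lines above mean the write job fell back to Hadoop's classic FileOutputCommitter with commit algorithm v1, so each task writes under a _temporary/ subdirectory and the output is promoted at job commit (see the "Saved output of task ... _temporary/0/task_..." line further down). A hedged sketch of the standard Hadoop keys corresponding to what the log reports follows; the property names are my assumption, not taken from the test code, and the values simply mirror the log.

    // Hedged sketch: standard Hadoop MapReduce keys matching the committer settings reported
    // above (algorithm version 1, cleanup not skipped, cleanup failures not ignored).
    // Key names are assumed, not read from GATK.
    import org.apache.hadoop.conf.Configuration;

    public final class CommitterConfigSketch {
        public static Configuration committerDefaults() {
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup.skipped", false);
            conf.setBoolean("mapreduce.fileoutputcommitter.cleanup-failures.ignored", false);
            return conf;
        }
    }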
20:33:38.603 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:38.604 INFO  DAGScheduler - Registering RDD 955 (mapToPair at SparkUtils.java:161) as input to shuffle 40
20:33:38.604 INFO  DAGScheduler - Got job 149 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:38.604 INFO  DAGScheduler - Final stage: ResultStage 201 (runJob at SparkHadoopWriter.scala:83)
20:33:38.604 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 200)
20:33:38.604 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 200)
20:33:38.604 INFO  DAGScheduler - Submitting ShuffleMapStage 200 (MapPartitionsRDD[955] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:38.621 INFO  MemoryStore - Block broadcast_397 stored as values in memory (estimated size 434.3 KiB, free 1917.2 MiB)
20:33:38.622 INFO  MemoryStore - Block broadcast_397_piece0 stored as bytes in memory (estimated size 157.6 KiB, free 1917.1 MiB)
20:33:38.622 INFO  BlockManagerInfo - Added broadcast_397_piece0 in memory on localhost:45281 (size: 157.6 KiB, free: 1919.4 MiB)
20:33:38.622 INFO  SparkContext - Created broadcast 397 from broadcast at DAGScheduler.scala:1580
20:33:38.623 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 200 (MapPartitionsRDD[955] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:38.623 INFO  TaskSchedulerImpl - Adding task set 200.0 with 1 tasks resource profile 0
20:33:38.623 INFO  TaskSetManager - Starting task 0.0 in stage 200.0 (TID 256) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7882 bytes) 
20:33:38.624 INFO  Executor - Running task 0.0 in stage 200.0 (TID 256)
20:33:38.654 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:38.666 INFO  Executor - Finished task 0.0 in stage 200.0 (TID 256). 1148 bytes result sent to driver
20:33:38.667 INFO  TaskSetManager - Finished task 0.0 in stage 200.0 (TID 256) in 43 ms on localhost (executor driver) (1/1)
20:33:38.667 INFO  TaskSchedulerImpl - Removed TaskSet 200.0, whose tasks have all completed, from pool 
20:33:38.667 INFO  DAGScheduler - ShuffleMapStage 200 (mapToPair at SparkUtils.java:161) finished in 0.063 s
20:33:38.667 INFO  DAGScheduler - looking for newly runnable stages
20:33:38.667 INFO  DAGScheduler - running: HashSet()
20:33:38.667 INFO  DAGScheduler - waiting: HashSet(ResultStage 201)
20:33:38.667 INFO  DAGScheduler - failed: HashSet()
20:33:38.667 INFO  DAGScheduler - Submitting ResultStage 201 (MapPartitionsRDD[960] at mapToPair at BamSink.java:91), which has no missing parents
20:33:38.673 INFO  MemoryStore - Block broadcast_398 stored as values in memory (estimated size 155.3 KiB, free 1916.9 MiB)
20:33:38.674 INFO  MemoryStore - Block broadcast_398_piece0 stored as bytes in memory (estimated size 58.5 KiB, free 1916.8 MiB)
20:33:38.674 INFO  BlockManagerInfo - Added broadcast_398_piece0 in memory on localhost:45281 (size: 58.5 KiB, free: 1919.3 MiB)
20:33:38.674 INFO  SparkContext - Created broadcast 398 from broadcast at DAGScheduler.scala:1580
20:33:38.674 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 201 (MapPartitionsRDD[960] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:38.674 INFO  TaskSchedulerImpl - Adding task set 201.0 with 1 tasks resource profile 0
20:33:38.675 INFO  TaskSetManager - Starting task 0.0 in stage 201.0 (TID 257) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:38.675 INFO  Executor - Running task 0.0 in stage 201.0 (TID 257)
20:33:38.679 INFO  ShuffleBlockFetcherIterator - Getting 1 (312.6 KiB) non-empty blocks including 1 (312.6 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:38.679 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:38.689 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.690 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.690 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.690 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.690 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.690 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.711 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033389101107200781430521_0960_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest3.someOtherPlace8593226009995322952/_temporary/0/task_202507152033389101107200781430521_0960_r_000000
20:33:38.711 INFO  SparkHadoopMapRedUtil - attempt_202507152033389101107200781430521_0960_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:38.712 INFO  Executor - Finished task 0.0 in stage 201.0 (TID 257). 1858 bytes result sent to driver
20:33:38.712 INFO  TaskSetManager - Finished task 0.0 in stage 201.0 (TID 257) in 37 ms on localhost (executor driver) (1/1)
20:33:38.712 INFO  TaskSchedulerImpl - Removed TaskSet 201.0, whose tasks have all completed, from pool 
20:33:38.713 INFO  DAGScheduler - ResultStage 201 (runJob at SparkHadoopWriter.scala:83) finished in 0.046 s
20:33:38.713 INFO  DAGScheduler - Job 149 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.713 INFO  TaskSchedulerImpl - Killing all running tasks in stage 201: Stage finished
20:33:38.713 INFO  DAGScheduler - Job 149 finished: runJob at SparkHadoopWriter.scala:83, took 0.109776 s
20:33:38.713 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033389101107200781430521_0960.
20:33:38.718 INFO  SparkHadoopWriter - Write Job job_202507152033389101107200781430521_0960 committed. Elapsed time: 4 ms.
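Note: Job 149 above has the two-stage shape used by every sharded write in this log: a ShuffleMapStage built from mapToPair at SparkUtils.java:161 that keys the reads and shuffles them, feeding a ResultStage (here mapToPair at BamSink.java:91) that writes one shard per partition. A rough illustration of that key / sort-across-a-shuffle / write-in-order pattern is below, with placeholder record and key types rather than GATK's actual classes.

    // Rough illustration of the shuffle seen above: key the records, sort them across a
    // shuffle, then hand the ordered partitions to the writer. Types are placeholders.
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import scala.Tuple2;

    public final class SortBeforeWriteSketch {
        // Placeholder key; the real pipeline keys reads by alignment coordinate.
        private static String keyOf(String read) { return read; }

        public static JavaRDD<String> sortBeforeWrite(JavaRDD<String> reads) {
            JavaPairRDD<String, String> keyed =
                    reads.mapToPair(r -> new Tuple2<>(keyOf(r), r)); // ShuffleMapStage side
            return keyed.sortByKey().values();                        // ordered output for the ResultStage
        }
    }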
20:33:38.728 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest312105970388397425190.bam
20:33:38.732 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest312105970388397425190.bam done
20:33:38.732 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/ReadsSparkSinkUnitTest3.someOtherPlace8593226009995322952 to /tmp/ReadsSparkSinkUnitTest312105970388397425190.bam.sbi
20:33:38.737 INFO  IndexFileMerger - Done merging .sbi files
20:33:38.737 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/ReadsSparkSinkUnitTest3.someOtherPlace8593226009995322952 to /tmp/ReadsSparkSinkUnitTest312105970388397425190.bam.bai
20:33:38.742 INFO  IndexFileMerger - Done merging .bai files
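Note: after the job commit, HadoopFileSystemWrapper concatenates the per-partition parts into the single output BAM, and IndexFileMerger combines the per-part .sbi and .bai indexes. A byte-level sketch of the concatenation step using the plain Hadoop FileSystem API follows; the real merger also has to handle BAM/BGZF details (per-part headers, terminator blocks), which this sketch deliberately ignores.

    // Sketch of "concatenate the shard parts into one file" with the plain Hadoop
    // FileSystem API: stream every part-* file, in name order, into the target.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.Arrays;
    import java.util.Comparator;

    public final class ConcatPartsSketch {
        public static void concatParts(Path partsDir, Path target, Configuration conf) throws IOException {
            FileSystem fs = partsDir.getFileSystem(conf);
            FileStatus[] parts = fs.globStatus(new Path(partsDir, "part-*"));
            Arrays.sort(parts, Comparator.comparing((FileStatus s) -> s.getPath().getName()));
            try (OutputStream out = fs.create(target, true)) {
                for (FileStatus part : parts) {
                    try (FSDataInputStream in = fs.open(part.getPath())) {
                        IOUtils.copyBytes(in, out, conf, false); // keep 'out' open for the next part
                    }
                }
            }
        }
    }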
20:33:38.743 INFO  MemoryStore - Block broadcast_399 stored as values in memory (estimated size 312.0 B, free 1916.8 MiB)
20:33:38.743 INFO  MemoryStore - Block broadcast_399_piece0 stored as bytes in memory (estimated size 231.0 B, free 1916.8 MiB)
20:33:38.744 INFO  BlockManagerInfo - Added broadcast_399_piece0 in memory on localhost:45281 (size: 231.0 B, free: 1919.3 MiB)
20:33:38.744 INFO  SparkContext - Created broadcast 399 from broadcast at BamSource.java:104
20:33:38.745 INFO  MemoryStore - Block broadcast_400 stored as values in memory (estimated size 297.9 KiB, free 1916.6 MiB)
20:33:38.751 INFO  MemoryStore - Block broadcast_400_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.5 MiB)
20:33:38.751 INFO  BlockManagerInfo - Added broadcast_400_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:38.751 INFO  SparkContext - Created broadcast 400 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.760 INFO  FileInputFormat - Total input files to process : 1
20:33:38.774 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:38.774 INFO  DAGScheduler - Got job 150 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:38.774 INFO  DAGScheduler - Final stage: ResultStage 202 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:38.774 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.774 INFO  DAGScheduler - Missing parents: List()
20:33:38.774 INFO  DAGScheduler - Submitting ResultStage 202 (MapPartitionsRDD[966] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.780 INFO  MemoryStore - Block broadcast_401 stored as values in memory (estimated size 148.2 KiB, free 1916.4 MiB)
20:33:38.781 INFO  MemoryStore - Block broadcast_401_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1916.3 MiB)
20:33:38.781 INFO  BlockManagerInfo - Added broadcast_401_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.2 MiB)
20:33:38.781 INFO  SparkContext - Created broadcast 401 from broadcast at DAGScheduler.scala:1580
20:33:38.781 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 202 (MapPartitionsRDD[966] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.782 INFO  TaskSchedulerImpl - Adding task set 202.0 with 1 tasks resource profile 0
20:33:38.782 INFO  TaskSetManager - Starting task 0.0 in stage 202.0 (TID 258) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:38.782 INFO  Executor - Running task 0.0 in stage 202.0 (TID 258)
20:33:38.794 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest312105970388397425190.bam:0+236517
20:33:38.801 INFO  BlockManagerInfo - Removed broadcast_390_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.3 MiB)
20:33:38.801 INFO  BlockManagerInfo - Removed broadcast_394_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:38.802 INFO  Executor - Finished task 0.0 in stage 202.0 (TID 258). 749556 bytes result sent to driver
20:33:38.802 INFO  BlockManagerInfo - Removed broadcast_397_piece0 on localhost:45281 in memory (size: 157.6 KiB, free: 1919.5 MiB)
20:33:38.802 INFO  BlockManagerInfo - Removed broadcast_392_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.5 MiB)
20:33:38.803 INFO  BlockManagerInfo - Removed broadcast_395_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.5 MiB)
20:33:38.803 INFO  BlockManagerInfo - Removed broadcast_398_piece0 on localhost:45281 in memory (size: 58.5 KiB, free: 1919.6 MiB)
20:33:38.804 INFO  TaskSetManager - Finished task 0.0 in stage 202.0 (TID 258) in 22 ms on localhost (executor driver) (1/1)
20:33:38.804 INFO  TaskSchedulerImpl - Removed TaskSet 202.0, whose tasks have all completed, from pool 
20:33:38.804 INFO  BlockManagerInfo - Removed broadcast_396_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.6 MiB)
20:33:38.804 INFO  DAGScheduler - ResultStage 202 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.029 s
20:33:38.804 INFO  DAGScheduler - Job 150 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.804 INFO  TaskSchedulerImpl - Killing all running tasks in stage 202: Stage finished
20:33:38.804 INFO  DAGScheduler - Job 150 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.030358 s
20:33:38.805 INFO  BlockManagerInfo - Removed broadcast_388_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.6 MiB)
20:33:38.805 INFO  BlockManagerInfo - Removed broadcast_382_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.6 MiB)
20:33:38.805 INFO  BlockManagerInfo - Removed broadcast_389_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:38.806 INFO  BlockManagerInfo - Removed broadcast_391_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:38.816 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:38.816 INFO  DAGScheduler - Got job 151 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:38.816 INFO  DAGScheduler - Final stage: ResultStage 203 (count at ReadsSparkSinkUnitTest.java:185)
20:33:38.816 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.816 INFO  DAGScheduler - Missing parents: List()
20:33:38.816 INFO  DAGScheduler - Submitting ResultStage 203 (MapPartitionsRDD[948] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.834 INFO  MemoryStore - Block broadcast_402 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:38.836 INFO  MemoryStore - Block broadcast_402_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:38.836 INFO  BlockManagerInfo - Added broadcast_402_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:38.836 INFO  SparkContext - Created broadcast 402 from broadcast at DAGScheduler.scala:1580
20:33:38.836 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 203 (MapPartitionsRDD[948] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.836 INFO  TaskSchedulerImpl - Adding task set 203.0 with 1 tasks resource profile 0
20:33:38.837 INFO  TaskSetManager - Starting task 0.0 in stage 203.0 (TID 259) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7893 bytes) 
20:33:38.837 INFO  Executor - Running task 0.0 in stage 203.0 (TID 259)
20:33:38.867 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:38.874 INFO  Executor - Finished task 0.0 in stage 203.0 (TID 259). 989 bytes result sent to driver
20:33:38.874 INFO  TaskSetManager - Finished task 0.0 in stage 203.0 (TID 259) in 37 ms on localhost (executor driver) (1/1)
20:33:38.874 INFO  TaskSchedulerImpl - Removed TaskSet 203.0, whose tasks have all completed, from pool 
20:33:38.874 INFO  DAGScheduler - ResultStage 203 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.058 s
20:33:38.874 INFO  DAGScheduler - Job 151 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.874 INFO  TaskSchedulerImpl - Killing all running tasks in stage 203: Stage finished
20:33:38.875 INFO  DAGScheduler - Job 151 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.058945 s
20:33:38.878 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:38.878 INFO  DAGScheduler - Got job 152 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:38.878 INFO  DAGScheduler - Final stage: ResultStage 204 (count at ReadsSparkSinkUnitTest.java:185)
20:33:38.878 INFO  DAGScheduler - Parents of final stage: List()
20:33:38.878 INFO  DAGScheduler - Missing parents: List()
20:33:38.878 INFO  DAGScheduler - Submitting ResultStage 204 (MapPartitionsRDD[966] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:38.884 INFO  MemoryStore - Block broadcast_403 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:38.885 INFO  MemoryStore - Block broadcast_403_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.4 MiB)
20:33:38.885 INFO  BlockManagerInfo - Added broadcast_403_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.6 MiB)
20:33:38.885 INFO  SparkContext - Created broadcast 403 from broadcast at DAGScheduler.scala:1580
20:33:38.885 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 204 (MapPartitionsRDD[966] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:38.885 INFO  TaskSchedulerImpl - Adding task set 204.0 with 1 tasks resource profile 0
20:33:38.886 INFO  TaskSetManager - Starting task 0.0 in stage 204.0 (TID 260) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:38.886 INFO  Executor - Running task 0.0 in stage 204.0 (TID 260)
20:33:38.897 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest312105970388397425190.bam:0+236517
20:33:38.900 INFO  Executor - Finished task 0.0 in stage 204.0 (TID 260). 989 bytes result sent to driver
20:33:38.900 INFO  TaskSetManager - Finished task 0.0 in stage 204.0 (TID 260) in 14 ms on localhost (executor driver) (1/1)
20:33:38.900 INFO  TaskSchedulerImpl - Removed TaskSet 204.0, whose tasks have all completed, from pool 
20:33:38.901 INFO  DAGScheduler - ResultStage 204 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.023 s
20:33:38.901 INFO  DAGScheduler - Job 152 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:38.901 INFO  TaskSchedulerImpl - Killing all running tasks in stage 204: Stage finished
20:33:38.901 INFO  DAGScheduler - Job 152 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.022904 s
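Note: the next case writes CRAM, which is why the source/sink broadcasts below come from CramSource.java:114 and CramSink.java:76: CRAM is reference-compressed and cannot be decoded without the reference sequence. A minimal htsjdk sketch of opening a CRAM with an explicit reference is below; the file paths are illustrative only.

    // Minimal htsjdk sketch (illustrative paths): reading CRAM requires the reference used
    // to compress it, supplied here via SamReaderFactory.referenceSequence.
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import java.io.File;
    import java.io.IOException;

    public final class CramReadSketch {
        public static long countRecords(File cram, File referenceFasta) throws IOException {
            try (SamReader reader = SamReaderFactory.makeDefault()
                    .referenceSequence(referenceFasta)
                    .open(cram)) {
                long n = 0;
                for (SAMRecord rec : reader) {
                    n++;
                }
                return n;
            }
        }
    }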
20:33:38.908 INFO  MemoryStore - Block broadcast_404 stored as values in memory (estimated size 576.0 B, free 1918.4 MiB)
20:33:38.909 INFO  MemoryStore - Block broadcast_404_piece0 stored as bytes in memory (estimated size 228.0 B, free 1918.4 MiB)
20:33:38.909 INFO  BlockManagerInfo - Added broadcast_404_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.6 MiB)
20:33:38.909 INFO  SparkContext - Created broadcast 404 from broadcast at CramSource.java:114
20:33:38.910 INFO  MemoryStore - Block broadcast_405 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:38.916 INFO  MemoryStore - Block broadcast_405_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:38.916 INFO  BlockManagerInfo - Added broadcast_405_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:38.916 INFO  SparkContext - Created broadcast 405 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.932 INFO  MemoryStore - Block broadcast_406 stored as values in memory (estimated size 576.0 B, free 1918.0 MiB)
20:33:38.932 INFO  MemoryStore - Block broadcast_406_piece0 stored as bytes in memory (estimated size 228.0 B, free 1918.0 MiB)
20:33:38.932 INFO  BlockManagerInfo - Added broadcast_406_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.6 MiB)
20:33:38.933 INFO  SparkContext - Created broadcast 406 from broadcast at CramSource.java:114
20:33:38.933 INFO  MemoryStore - Block broadcast_407 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:38.939 INFO  MemoryStore - Block broadcast_407_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:38.939 INFO  BlockManagerInfo - Added broadcast_407_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:38.940 INFO  SparkContext - Created broadcast 407 from newAPIHadoopFile at PathSplitSource.java:96
20:33:38.953 INFO  FileInputFormat - Total input files to process : 1
20:33:38.954 INFO  MemoryStore - Block broadcast_408 stored as values in memory (estimated size 6.0 KiB, free 1917.7 MiB)
20:33:38.955 INFO  MemoryStore - Block broadcast_408_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1917.7 MiB)
20:33:38.955 INFO  BlockManagerInfo - Added broadcast_408_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.5 MiB)
20:33:38.955 INFO  SparkContext - Created broadcast 408 from broadcast at ReadsSparkSink.java:133
20:33:38.956 INFO  MemoryStore - Block broadcast_409 stored as values in memory (estimated size 6.2 KiB, free 1917.7 MiB)
20:33:38.956 INFO  MemoryStore - Block broadcast_409_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1917.7 MiB)
20:33:38.956 INFO  BlockManagerInfo - Added broadcast_409_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.5 MiB)
20:33:38.956 INFO  SparkContext - Created broadcast 409 from broadcast at CramSink.java:76
20:33:38.958 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:38.958 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:38.958 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:38.974 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:38.975 INFO  DAGScheduler - Registering RDD 978 (mapToPair at SparkUtils.java:161) as input to shuffle 41
20:33:38.975 INFO  DAGScheduler - Got job 153 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:38.975 INFO  DAGScheduler - Final stage: ResultStage 206 (runJob at SparkHadoopWriter.scala:83)
20:33:38.975 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 205)
20:33:38.975 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 205)
20:33:38.975 INFO  DAGScheduler - Submitting ShuffleMapStage 205 (MapPartitionsRDD[978] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:38.987 INFO  MemoryStore - Block broadcast_410 stored as values in memory (estimated size 292.8 KiB, free 1917.4 MiB)
20:33:38.988 INFO  MemoryStore - Block broadcast_410_piece0 stored as bytes in memory (estimated size 107.3 KiB, free 1917.3 MiB)
20:33:38.988 INFO  BlockManagerInfo - Added broadcast_410_piece0 in memory on localhost:45281 (size: 107.3 KiB, free: 1919.4 MiB)
20:33:38.988 INFO  SparkContext - Created broadcast 410 from broadcast at DAGScheduler.scala:1580
20:33:38.989 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 205 (MapPartitionsRDD[978] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:38.989 INFO  TaskSchedulerImpl - Adding task set 205.0 with 1 tasks resource profile 0
20:33:38.989 INFO  TaskSetManager - Starting task 0.0 in stage 205.0 (TID 261) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7869 bytes) 
20:33:38.989 INFO  Executor - Running task 0.0 in stage 205.0 (TID 261)
20:33:39.010 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:39.025 INFO  Executor - Finished task 0.0 in stage 205.0 (TID 261). 1148 bytes result sent to driver
20:33:39.025 INFO  TaskSetManager - Finished task 0.0 in stage 205.0 (TID 261) in 36 ms on localhost (executor driver) (1/1)
20:33:39.025 INFO  TaskSchedulerImpl - Removed TaskSet 205.0, whose tasks have all completed, from pool 
20:33:39.025 INFO  DAGScheduler - ShuffleMapStage 205 (mapToPair at SparkUtils.java:161) finished in 0.050 s
20:33:39.025 INFO  DAGScheduler - looking for newly runnable stages
20:33:39.025 INFO  DAGScheduler - running: HashSet()
20:33:39.025 INFO  DAGScheduler - waiting: HashSet(ResultStage 206)
20:33:39.025 INFO  DAGScheduler - failed: HashSet()
20:33:39.026 INFO  DAGScheduler - Submitting ResultStage 206 (MapPartitionsRDD[983] at mapToPair at CramSink.java:89), which has no missing parents
20:33:39.032 INFO  MemoryStore - Block broadcast_411 stored as values in memory (estimated size 153.2 KiB, free 1917.1 MiB)
20:33:39.033 INFO  MemoryStore - Block broadcast_411_piece0 stored as bytes in memory (estimated size 58.0 KiB, free 1917.1 MiB)
20:33:39.033 INFO  BlockManagerInfo - Added broadcast_411_piece0 in memory on localhost:45281 (size: 58.0 KiB, free: 1919.4 MiB)
20:33:39.033 INFO  SparkContext - Created broadcast 411 from broadcast at DAGScheduler.scala:1580
20:33:39.033 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 206 (MapPartitionsRDD[983] at mapToPair at CramSink.java:89) (first 15 tasks are for partitions Vector(0))
20:33:39.033 INFO  TaskSchedulerImpl - Adding task set 206.0 with 1 tasks resource profile 0
20:33:39.034 INFO  TaskSetManager - Starting task 0.0 in stage 206.0 (TID 262) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:39.034 INFO  Executor - Running task 0.0 in stage 206.0 (TID 262)
20:33:39.038 INFO  ShuffleBlockFetcherIterator - Getting 1 (82.3 KiB) non-empty blocks including 1 (82.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:39.038 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:39.044 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:39.044 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.044 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.044 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:39.044 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.044 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.090 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033382608549816670296160_0983_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest5.someOtherPlace18339599667454897089/_temporary/0/task_202507152033382608549816670296160_0983_r_000000
20:33:39.090 INFO  SparkHadoopMapRedUtil - attempt_202507152033382608549816670296160_0983_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:39.091 INFO  Executor - Finished task 0.0 in stage 206.0 (TID 262). 1858 bytes result sent to driver
20:33:39.091 INFO  TaskSetManager - Finished task 0.0 in stage 206.0 (TID 262) in 57 ms on localhost (executor driver) (1/1)
20:33:39.091 INFO  TaskSchedulerImpl - Removed TaskSet 206.0, whose tasks have all completed, from pool 
20:33:39.091 INFO  DAGScheduler - ResultStage 206 (runJob at SparkHadoopWriter.scala:83) finished in 0.065 s
20:33:39.092 INFO  DAGScheduler - Job 153 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.092 INFO  TaskSchedulerImpl - Killing all running tasks in stage 206: Stage finished
20:33:39.092 INFO  DAGScheduler - Job 153 finished: runJob at SparkHadoopWriter.scala:83, took 0.117239 s
20:33:39.092 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033382608549816670296160_0983.
20:33:39.097 INFO  SparkHadoopWriter - Write Job job_202507152033382608549816670296160_0983 committed. Elapsed time: 4 ms.
20:33:39.109 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/ReadsSparkSinkUnitTest59837365120138972489.cram
20:33:39.113 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest59837365120138972489.cram done
20:33:39.115 INFO  MemoryStore - Block broadcast_412 stored as values in memory (estimated size 504.0 B, free 1917.1 MiB)
20:33:39.120 INFO  MemoryStore - Block broadcast_412_piece0 stored as bytes in memory (estimated size 159.0 B, free 1917.1 MiB)
20:33:39.120 INFO  BlockManagerInfo - Added broadcast_412_piece0 in memory on localhost:45281 (size: 159.0 B, free: 1919.4 MiB)
20:33:39.120 INFO  BlockManagerInfo - Removed broadcast_401_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.4 MiB)
20:33:39.121 INFO  SparkContext - Created broadcast 412 from broadcast at CramSource.java:114
20:33:39.121 INFO  BlockManagerInfo - Removed broadcast_402_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.6 MiB)
20:33:39.122 INFO  BlockManagerInfo - Removed broadcast_411_piece0 on localhost:45281 in memory (size: 58.0 KiB, free: 1919.6 MiB)
20:33:39.122 INFO  MemoryStore - Block broadcast_413 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:39.122 INFO  BlockManagerInfo - Removed broadcast_406_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.6 MiB)
20:33:39.123 INFO  BlockManagerInfo - Removed broadcast_407_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:39.124 INFO  BlockManagerInfo - Removed broadcast_403_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.7 MiB)
20:33:39.124 INFO  BlockManagerInfo - Removed broadcast_393_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:39.124 INFO  BlockManagerInfo - Removed broadcast_399_piece0 on localhost:45281 in memory (size: 231.0 B, free: 1919.8 MiB)
20:33:39.125 INFO  BlockManagerInfo - Removed broadcast_409_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.8 MiB)
20:33:39.125 INFO  BlockManagerInfo - Removed broadcast_400_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:39.126 INFO  BlockManagerInfo - Removed broadcast_410_piece0 on localhost:45281 in memory (size: 107.3 KiB, free: 1919.9 MiB)
20:33:39.126 INFO  BlockManagerInfo - Removed broadcast_408_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1920.0 MiB)
20:33:39.130 INFO  MemoryStore - Block broadcast_413_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:39.130 INFO  BlockManagerInfo - Added broadcast_413_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:39.130 INFO  SparkContext - Created broadcast 413 from newAPIHadoopFile at PathSplitSource.java:96
20:33:39.144 INFO  FileInputFormat - Total input files to process : 1
20:33:39.169 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:39.169 INFO  DAGScheduler - Got job 154 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:39.169 INFO  DAGScheduler - Final stage: ResultStage 207 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:39.169 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.170 INFO  DAGScheduler - Missing parents: List()
20:33:39.170 INFO  DAGScheduler - Submitting ResultStage 207 (MapPartitionsRDD[989] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.181 INFO  MemoryStore - Block broadcast_414 stored as values in memory (estimated size 286.8 KiB, free 1919.0 MiB)
20:33:39.182 INFO  MemoryStore - Block broadcast_414_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.9 MiB)
20:33:39.182 INFO  BlockManagerInfo - Added broadcast_414_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.8 MiB)
20:33:39.182 INFO  SparkContext - Created broadcast 414 from broadcast at DAGScheduler.scala:1580
20:33:39.182 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 207 (MapPartitionsRDD[989] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.182 INFO  TaskSchedulerImpl - Adding task set 207.0 with 1 tasks resource profile 0
20:33:39.183 INFO  TaskSetManager - Starting task 0.0 in stage 207.0 (TID 263) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:39.183 INFO  Executor - Running task 0.0 in stage 207.0 (TID 263)
20:33:39.204 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest59837365120138972489.cram:0+43713
20:33:39.215 INFO  Executor - Finished task 0.0 in stage 207.0 (TID 263). 154101 bytes result sent to driver
20:33:39.215 INFO  TaskSetManager - Finished task 0.0 in stage 207.0 (TID 263) in 32 ms on localhost (executor driver) (1/1)
20:33:39.215 INFO  TaskSchedulerImpl - Removed TaskSet 207.0, whose tasks have all completed, from pool 
20:33:39.216 INFO  DAGScheduler - ResultStage 207 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.046 s
20:33:39.216 INFO  DAGScheduler - Job 154 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.216 INFO  TaskSchedulerImpl - Killing all running tasks in stage 207: Stage finished
20:33:39.216 INFO  DAGScheduler - Job 154 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.046757 s
20:33:39.224 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:39.224 INFO  DAGScheduler - Got job 155 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:39.224 INFO  DAGScheduler - Final stage: ResultStage 208 (count at ReadsSparkSinkUnitTest.java:185)
20:33:39.224 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.224 INFO  DAGScheduler - Missing parents: List()
20:33:39.224 INFO  DAGScheduler - Submitting ResultStage 208 (MapPartitionsRDD[972] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.237 INFO  MemoryStore - Block broadcast_415 stored as values in memory (estimated size 286.8 KiB, free 1918.7 MiB)
20:33:39.238 INFO  MemoryStore - Block broadcast_415_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.6 MiB)
20:33:39.238 INFO  BlockManagerInfo - Added broadcast_415_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.7 MiB)
20:33:39.239 INFO  SparkContext - Created broadcast 415 from broadcast at DAGScheduler.scala:1580
20:33:39.239 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 208 (MapPartitionsRDD[972] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.239 INFO  TaskSchedulerImpl - Adding task set 208.0 with 1 tasks resource profile 0
20:33:39.239 INFO  TaskSetManager - Starting task 0.0 in stage 208.0 (TID 264) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7880 bytes) 
20:33:39.240 INFO  Executor - Running task 0.0 in stage 208.0 (TID 264)
20:33:39.264 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:39.271 INFO  Executor - Finished task 0.0 in stage 208.0 (TID 264). 989 bytes result sent to driver
20:33:39.271 INFO  TaskSetManager - Finished task 0.0 in stage 208.0 (TID 264) in 32 ms on localhost (executor driver) (1/1)
20:33:39.271 INFO  TaskSchedulerImpl - Removed TaskSet 208.0, whose tasks have all completed, from pool 
20:33:39.271 INFO  DAGScheduler - ResultStage 208 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.047 s
20:33:39.271 INFO  DAGScheduler - Job 155 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.271 INFO  TaskSchedulerImpl - Killing all running tasks in stage 208: Stage finished
20:33:39.272 INFO  DAGScheduler - Job 155 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.048049 s
20:33:39.276 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:39.276 INFO  DAGScheduler - Got job 156 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:39.276 INFO  DAGScheduler - Final stage: ResultStage 209 (count at ReadsSparkSinkUnitTest.java:185)
20:33:39.276 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.276 INFO  DAGScheduler - Missing parents: List()
20:33:39.277 INFO  DAGScheduler - Submitting ResultStage 209 (MapPartitionsRDD[989] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.288 INFO  MemoryStore - Block broadcast_416 stored as values in memory (estimated size 286.8 KiB, free 1918.3 MiB)
20:33:39.289 INFO  MemoryStore - Block broadcast_416_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.2 MiB)
20:33:39.289 INFO  BlockManagerInfo - Added broadcast_416_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.6 MiB)
20:33:39.289 INFO  SparkContext - Created broadcast 416 from broadcast at DAGScheduler.scala:1580
20:33:39.289 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 209 (MapPartitionsRDD[989] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.289 INFO  TaskSchedulerImpl - Adding task set 209.0 with 1 tasks resource profile 0
20:33:39.290 INFO  TaskSetManager - Starting task 0.0 in stage 209.0 (TID 265) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:39.290 INFO  Executor - Running task 0.0 in stage 209.0 (TID 265)
20:33:39.311 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest59837365120138972489.cram:0+43713
20:33:39.321 INFO  Executor - Finished task 0.0 in stage 209.0 (TID 265). 989 bytes result sent to driver
20:33:39.321 INFO  TaskSetManager - Finished task 0.0 in stage 209.0 (TID 265) in 31 ms on localhost (executor driver) (1/1)
20:33:39.321 INFO  TaskSchedulerImpl - Removed TaskSet 209.0, whose tasks have all completed, from pool 
20:33:39.321 INFO  DAGScheduler - ResultStage 209 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.044 s
20:33:39.321 INFO  DAGScheduler - Job 156 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.321 INFO  TaskSchedulerImpl - Killing all running tasks in stage 209: Stage finished
20:33:39.322 INFO  DAGScheduler - Job 156 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.045367 s
20:33:39.330 INFO  MemoryStore - Block broadcast_417 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:39.341 INFO  MemoryStore - Block broadcast_417_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.8 MiB)
20:33:39.341 INFO  BlockManagerInfo - Added broadcast_417_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:39.341 INFO  SparkContext - Created broadcast 417 from newAPIHadoopFile at PathSplitSource.java:96
20:33:39.366 INFO  MemoryStore - Block broadcast_418 stored as values in memory (estimated size 297.9 KiB, free 1917.5 MiB)
20:33:39.372 INFO  MemoryStore - Block broadcast_418_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.5 MiB)
20:33:39.372 INFO  BlockManagerInfo - Added broadcast_418_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:39.373 INFO  SparkContext - Created broadcast 418 from newAPIHadoopFile at PathSplitSource.java:96
20:33:39.392 INFO  FileInputFormat - Total input files to process : 1
20:33:39.394 INFO  MemoryStore - Block broadcast_419 stored as values in memory (estimated size 160.7 KiB, free 1917.3 MiB)
20:33:39.395 INFO  MemoryStore - Block broadcast_419_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:39.395 INFO  BlockManagerInfo - Added broadcast_419_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:39.395 INFO  SparkContext - Created broadcast 419 from broadcast at ReadsSparkSink.java:133
20:33:39.398 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:39.398 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.398 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.416 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:39.416 INFO  DAGScheduler - Registering RDD 1003 (mapToPair at SparkUtils.java:161) as input to shuffle 42
20:33:39.416 INFO  DAGScheduler - Got job 157 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:39.416 INFO  DAGScheduler - Final stage: ResultStage 211 (runJob at SparkHadoopWriter.scala:83)
20:33:39.416 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 210)
20:33:39.416 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 210)
20:33:39.416 INFO  DAGScheduler - Submitting ShuffleMapStage 210 (MapPartitionsRDD[1003] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:39.435 INFO  MemoryStore - Block broadcast_420 stored as values in memory (estimated size 520.4 KiB, free 1916.8 MiB)
20:33:39.437 INFO  MemoryStore - Block broadcast_420_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.7 MiB)
20:33:39.437 INFO  BlockManagerInfo - Added broadcast_420_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.3 MiB)
20:33:39.437 INFO  SparkContext - Created broadcast 420 from broadcast at DAGScheduler.scala:1580
20:33:39.437 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 210 (MapPartitionsRDD[1003] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:39.437 INFO  TaskSchedulerImpl - Adding task set 210.0 with 1 tasks resource profile 0
20:33:39.438 INFO  TaskSetManager - Starting task 0.0 in stage 210.0 (TID 266) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:39.438 INFO  Executor - Running task 0.0 in stage 210.0 (TID 266)
20:33:39.470 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:39.485 INFO  Executor - Finished task 0.0 in stage 210.0 (TID 266). 1148 bytes result sent to driver
20:33:39.485 INFO  TaskSetManager - Finished task 0.0 in stage 210.0 (TID 266) in 48 ms on localhost (executor driver) (1/1)
20:33:39.485 INFO  TaskSchedulerImpl - Removed TaskSet 210.0, whose tasks have all completed, from pool 
20:33:39.485 INFO  DAGScheduler - ShuffleMapStage 210 (mapToPair at SparkUtils.java:161) finished in 0.068 s
20:33:39.485 INFO  DAGScheduler - looking for newly runnable stages
20:33:39.485 INFO  DAGScheduler - running: HashSet()
20:33:39.485 INFO  DAGScheduler - waiting: HashSet(ResultStage 211)
20:33:39.485 INFO  DAGScheduler - failed: HashSet()
20:33:39.485 INFO  DAGScheduler - Submitting ResultStage 211 (MapPartitionsRDD[1009] at saveAsTextFile at SamSink.java:65), which has no missing parents
20:33:39.492 INFO  MemoryStore - Block broadcast_421 stored as values in memory (estimated size 241.1 KiB, free 1916.4 MiB)
20:33:39.493 INFO  MemoryStore - Block broadcast_421_piece0 stored as bytes in memory (estimated size 66.9 KiB, free 1916.4 MiB)
20:33:39.493 INFO  BlockManagerInfo - Added broadcast_421_piece0 in memory on localhost:45281 (size: 66.9 KiB, free: 1919.3 MiB)
20:33:39.493 INFO  SparkContext - Created broadcast 421 from broadcast at DAGScheduler.scala:1580
20:33:39.493 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 211 (MapPartitionsRDD[1009] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
20:33:39.493 INFO  TaskSchedulerImpl - Adding task set 211.0 with 1 tasks resource profile 0
20:33:39.494 INFO  TaskSetManager - Starting task 0.0 in stage 211.0 (TID 267) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:39.494 INFO  Executor - Running task 0.0 in stage 211.0 (TID 267)
20:33:39.498 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:39.498 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:39.508 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:39.509 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.509 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.526 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033395562988972602683227_1009_m_000000_0' to file:/tmp/ReadsSparkSinkUnitTest6.someOtherPlace786114811945311201/_temporary/0/task_202507152033395562988972602683227_1009_m_000000
20:33:39.526 INFO  SparkHadoopMapRedUtil - attempt_202507152033395562988972602683227_1009_m_000000_0: Committed. Elapsed time: 0 ms.
20:33:39.526 INFO  Executor - Finished task 0.0 in stage 211.0 (TID 267). 1858 bytes result sent to driver
20:33:39.527 INFO  TaskSetManager - Finished task 0.0 in stage 211.0 (TID 267) in 33 ms on localhost (executor driver) (1/1)
20:33:39.527 INFO  TaskSchedulerImpl - Removed TaskSet 211.0, whose tasks have all completed, from pool 
20:33:39.527 INFO  DAGScheduler - ResultStage 211 (runJob at SparkHadoopWriter.scala:83) finished in 0.041 s
20:33:39.527 INFO  DAGScheduler - Job 157 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.527 INFO  TaskSchedulerImpl - Killing all running tasks in stage 211: Stage finished
20:33:39.527 INFO  DAGScheduler - Job 157 finished: runJob at SparkHadoopWriter.scala:83, took 0.111546 s
20:33:39.527 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033395562988972602683227_1009.
20:33:39.532 INFO  SparkHadoopWriter - Write Job job_202507152033395562988972602683227_1009 committed. Elapsed time: 4 ms.
20:33:39.540 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/ReadsSparkSinkUnitTest68620145664700517863.sam
20:33:39.544 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/ReadsSparkSinkUnitTest68620145664700517863.sam done
WARNING	2025-07-15 20:33:39	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
WARNING	2025-07-15 20:33:39	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
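Note: the .sam case is written as plain text: the ResultStage above runs saveAsTextFile at SamSink.java:65, and the two resulting parts are concatenated into the final .sam. When that file is read back, SamReaderFactory cannot detect a binary signature from the stream and logs the two warnings above before falling back to SAM parsing. A minimal sketch of the text write is below, with illustrative variable names.

    // Minimal sketch of the text path named in the log: SAM records are tab-separated text
    // lines, so the sharded write can go through JavaRDD.saveAsTextFile. headerAndRecords
    // and outputDir are illustrative; the real sink handles the header separately and
    // concatenates the parts afterwards.
    import org.apache.spark.api.java.JavaRDD;

    public final class SamTextWriteSketch {
        public static void writeSamAsText(JavaRDD<String> headerAndRecords, String outputDir) {
            headerAndRecords.saveAsTextFile(outputDir);
        }
    }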
20:33:39.547 INFO  MemoryStore - Block broadcast_422 stored as values in memory (estimated size 160.7 KiB, free 1916.2 MiB)
20:33:39.548 INFO  MemoryStore - Block broadcast_422_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.2 MiB)
20:33:39.548 INFO  BlockManagerInfo - Added broadcast_422_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:39.548 INFO  SparkContext - Created broadcast 422 from broadcast at SamSource.java:78
20:33:39.549 INFO  MemoryStore - Block broadcast_423 stored as values in memory (estimated size 297.9 KiB, free 1915.9 MiB)
20:33:39.555 INFO  BlockManagerInfo - Removed broadcast_414_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.4 MiB)
20:33:39.555 INFO  BlockManagerInfo - Removed broadcast_412_piece0 on localhost:45281 in memory (size: 159.0 B, free: 1919.4 MiB)
20:33:39.555 INFO  BlockManagerInfo - Removed broadcast_420_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:39.556 INFO  BlockManagerInfo - Removed broadcast_405_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:39.556 INFO  BlockManagerInfo - Removed broadcast_419_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:39.557 INFO  BlockManagerInfo - Removed broadcast_413_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:39.557 INFO  BlockManagerInfo - Removed broadcast_404_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.6 MiB)
20:33:39.557 INFO  BlockManagerInfo - Removed broadcast_421_piece0 on localhost:45281 in memory (size: 66.9 KiB, free: 1919.7 MiB)
20:33:39.558 INFO  BlockManagerInfo - Removed broadcast_416_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.8 MiB)
20:33:39.558 INFO  BlockManagerInfo - Removed broadcast_415_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.9 MiB)
20:33:39.559 INFO  BlockManagerInfo - Removed broadcast_418_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.9 MiB)
20:33:39.560 INFO  MemoryStore - Block broadcast_423_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.2 MiB)
20:33:39.561 INFO  BlockManagerInfo - Added broadcast_423_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:39.561 INFO  SparkContext - Created broadcast 423 from newAPIHadoopFile at SamSource.java:108
20:33:39.563 INFO  FileInputFormat - Total input files to process : 1
20:33:39.566 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:39.567 INFO  DAGScheduler - Got job 158 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:39.567 INFO  DAGScheduler - Final stage: ResultStage 212 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:39.567 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.567 INFO  DAGScheduler - Missing parents: List()
20:33:39.567 INFO  DAGScheduler - Submitting ResultStage 212 (MapPartitionsRDD[1014] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.567 INFO  MemoryStore - Block broadcast_424 stored as values in memory (estimated size 7.5 KiB, free 1919.1 MiB)
20:33:39.568 INFO  MemoryStore - Block broadcast_424_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1919.1 MiB)
20:33:39.568 INFO  BlockManagerInfo - Added broadcast_424_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.9 MiB)
20:33:39.568 INFO  SparkContext - Created broadcast 424 from broadcast at DAGScheduler.scala:1580
20:33:39.568 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 212 (MapPartitionsRDD[1014] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.568 INFO  TaskSchedulerImpl - Adding task set 212.0 with 1 tasks resource profile 0
20:33:39.569 INFO  TaskSetManager - Starting task 0.0 in stage 212.0 (TID 268) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:39.569 INFO  Executor - Running task 0.0 in stage 212.0 (TID 268)
20:33:39.570 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest68620145664700517863.sam:0+847558
20:33:39.581 INFO  Executor - Finished task 0.0 in stage 212.0 (TID 268). 651526 bytes result sent to driver
20:33:39.582 INFO  TaskSetManager - Finished task 0.0 in stage 212.0 (TID 268) in 13 ms on localhost (executor driver) (1/1)
20:33:39.582 INFO  TaskSchedulerImpl - Removed TaskSet 212.0, whose tasks have all completed, from pool 
20:33:39.583 INFO  DAGScheduler - ResultStage 212 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.016 s
20:33:39.583 INFO  DAGScheduler - Job 158 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.583 INFO  TaskSchedulerImpl - Killing all running tasks in stage 212: Stage finished
20:33:39.583 INFO  DAGScheduler - Job 158 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.016350 s
20:33:39.592 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:39.593 INFO  DAGScheduler - Got job 159 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:39.593 INFO  DAGScheduler - Final stage: ResultStage 213 (count at ReadsSparkSinkUnitTest.java:185)
20:33:39.593 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.593 INFO  DAGScheduler - Missing parents: List()
20:33:39.593 INFO  DAGScheduler - Submitting ResultStage 213 (MapPartitionsRDD[996] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.610 INFO  MemoryStore - Block broadcast_425 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:39.611 INFO  MemoryStore - Block broadcast_425_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:39.611 INFO  BlockManagerInfo - Added broadcast_425_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:39.611 INFO  SparkContext - Created broadcast 425 from broadcast at DAGScheduler.scala:1580
20:33:39.612 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 213 (MapPartitionsRDD[996] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.612 INFO  TaskSchedulerImpl - Adding task set 213.0 with 1 tasks resource profile 0
20:33:39.612 INFO  TaskSetManager - Starting task 0.0 in stage 213.0 (TID 269) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:39.612 INFO  Executor - Running task 0.0 in stage 213.0 (TID 269)
20:33:39.647 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:39.656 INFO  Executor - Finished task 0.0 in stage 213.0 (TID 269). 989 bytes result sent to driver
20:33:39.657 INFO  TaskSetManager - Finished task 0.0 in stage 213.0 (TID 269) in 45 ms on localhost (executor driver) (1/1)
20:33:39.657 INFO  TaskSchedulerImpl - Removed TaskSet 213.0, whose tasks have all completed, from pool 
20:33:39.657 INFO  DAGScheduler - ResultStage 213 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.064 s
20:33:39.657 INFO  DAGScheduler - Job 159 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.657 INFO  TaskSchedulerImpl - Killing all running tasks in stage 213: Stage finished
20:33:39.657 INFO  DAGScheduler - Job 159 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.064757 s
20:33:39.662 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:39.662 INFO  DAGScheduler - Got job 160 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:39.662 INFO  DAGScheduler - Final stage: ResultStage 214 (count at ReadsSparkSinkUnitTest.java:185)
20:33:39.662 INFO  DAGScheduler - Parents of final stage: List()
20:33:39.662 INFO  DAGScheduler - Missing parents: List()
20:33:39.662 INFO  DAGScheduler - Submitting ResultStage 214 (MapPartitionsRDD[1014] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:39.663 INFO  MemoryStore - Block broadcast_426 stored as values in memory (estimated size 7.4 KiB, free 1918.6 MiB)
20:33:39.663 INFO  MemoryStore - Block broadcast_426_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1918.6 MiB)
20:33:39.663 INFO  BlockManagerInfo - Added broadcast_426_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.7 MiB)
20:33:39.663 INFO  SparkContext - Created broadcast 426 from broadcast at DAGScheduler.scala:1580
20:33:39.663 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 214 (MapPartitionsRDD[1014] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:39.663 INFO  TaskSchedulerImpl - Adding task set 214.0 with 1 tasks resource profile 0
20:33:39.664 INFO  TaskSetManager - Starting task 0.0 in stage 214.0 (TID 270) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7809 bytes) 
20:33:39.664 INFO  Executor - Running task 0.0 in stage 214.0 (TID 270)
20:33:39.665 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest68620145664700517863.sam:0+847558
20:33:39.672 INFO  Executor - Finished task 0.0 in stage 214.0 (TID 270). 946 bytes result sent to driver
20:33:39.673 INFO  TaskSetManager - Finished task 0.0 in stage 214.0 (TID 270) in 9 ms on localhost (executor driver) (1/1)
20:33:39.673 INFO  TaskSchedulerImpl - Removed TaskSet 214.0, whose tasks have all completed, from pool 
20:33:39.673 INFO  DAGScheduler - ResultStage 214 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.011 s
20:33:39.673 INFO  DAGScheduler - Job 160 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:39.673 INFO  TaskSchedulerImpl - Killing all running tasks in stage 214: Stage finished
20:33:39.673 INFO  DAGScheduler - Job 160 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.011602 s
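Jobs 158-160 are the verification half of the round trip: the collect at ReadsSparkSinkUnitTest.java:182 pulls the re-read records back to the driver for an element-wise comparison with the input, and the two count jobs at line 185 check that the written file holds exactly as many reads as the original BAM. A self-contained sketch of that pattern, with plain strings standing in for GATKRead objects (all names are illustrative, not the test's actual code):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class RoundTripCheckSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("round-trip-sketch").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                JavaRDD<String> original = sc.parallelize(Arrays.asList("r1", "r2", "r3"));
                JavaRDD<String> reloaded = sc.parallelize(Arrays.asList("r1", "r2", "r3"));
                // collect() ships every element to the driver so the lists can be compared directly.
                List<String> expected = original.collect();
                List<String> actual = reloaded.collect();
                if (!actual.equals(expected)) {
                    throw new AssertionError("re-read records differ from the input");
                }
                // count() is the cheaper sanity check behind the two small jobs above.
                if (original.count() != reloaded.count()) {
                    throw new AssertionError("record counts differ");
                }
            }
        }
    }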
20:33:39.690 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:39.691 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:39.692 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:39.692 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:39.695 INFO  MemoryStore - Block broadcast_427 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:39.706 INFO  MemoryStore - Block broadcast_427_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.2 MiB)
20:33:39.706 INFO  BlockManagerInfo - Added broadcast_427_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:39.706 INFO  SparkContext - Created broadcast 427 from newAPIHadoopFile at PathSplitSource.java:96
20:33:39.735 INFO  MemoryStore - Block broadcast_428 stored as values in memory (estimated size 297.9 KiB, free 1917.9 MiB)
20:33:39.741 INFO  MemoryStore - Block broadcast_428_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.9 MiB)
20:33:39.741 INFO  BlockManagerInfo - Added broadcast_428_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:39.742 INFO  SparkContext - Created broadcast 428 from newAPIHadoopFile at PathSplitSource.java:96
20:33:39.761 INFO  FileInputFormat - Total input files to process : 1
20:33:39.763 INFO  MemoryStore - Block broadcast_429 stored as values in memory (estimated size 160.7 KiB, free 1917.7 MiB)
20:33:39.764 INFO  MemoryStore - Block broadcast_429_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.7 MiB)
20:33:39.764 INFO  BlockManagerInfo - Added broadcast_429_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:39.764 INFO  SparkContext - Created broadcast 429 from broadcast at ReadsSparkSink.java:133
20:33:39.765 INFO  MemoryStore - Block broadcast_430 stored as values in memory (estimated size 163.2 KiB, free 1917.6 MiB)
20:33:39.766 INFO  MemoryStore - Block broadcast_430_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.6 MiB)
20:33:39.766 INFO  BlockManagerInfo - Added broadcast_430_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.6 MiB)
20:33:39.766 INFO  SparkContext - Created broadcast 430 from broadcast at BamSink.java:76
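Broadcasts 429 and 430 (from ReadsSparkSink.java:133 and BamSink.java:76) are how the sink ships shared per-file state, presumably including the SAM header, to every writer task once rather than serializing it into each task closure. A minimal broadcast sketch under that assumption, with a list of header lines standing in for the real header object:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.broadcast.Broadcast;

    public class HeaderBroadcastSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("broadcast-sketch").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                // Stand-in for the header-like state a sink would broadcast to its writer tasks.
                List<String> header = Arrays.asList("@HD\tVN:1.6\tSO:coordinate", "@SQ\tSN:1\tLN:1000");
                Broadcast<List<String>> headerBc = sc.broadcast(header);
                long written = sc.parallelize(Arrays.asList("r1", "r2", "r3"))
                        .map(read -> headerBc.value().size() + " header lines seen while writing " + read)
                        .count();
                System.out.println(written + " records processed");
            }
        }
    }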
20:33:39.768 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:39.769 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:39.769 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.769 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.769 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:39.780 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:39.780 INFO  DAGScheduler - Registering RDD 1028 (mapToPair at SparkUtils.java:161) as input to shuffle 43
20:33:39.780 INFO  DAGScheduler - Got job 161 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:39.780 INFO  DAGScheduler - Final stage: ResultStage 216 (runJob at SparkHadoopWriter.scala:83)
20:33:39.780 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 215)
20:33:39.781 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 215)
20:33:39.781 INFO  DAGScheduler - Submitting ShuffleMapStage 215 (MapPartitionsRDD[1028] at mapToPair at SparkUtils.java:161), which has no missing parents
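The write runs as two stages because the sink first keys the reads (the mapToPair at SparkUtils.java:161, registered as input to shuffle 43) so they arrive at the writer in the required order, and only then streams them into BAM parts in the downstream ResultStage. A tiny sketch of why keying plus sortByKey splits a job at a shuffle boundary; the key used here is arbitrary and purely illustrative:

    import java.util.Arrays;
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    public class ShuffleSortSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("shuffle-sort-sketch").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                JavaPairRDD<String, String> keyed = sc
                        .parallelize(Arrays.asList("chr2:50", "chr1:10", "chr1:5"))
                        .mapToPair(read -> new Tuple2<>(read, read));   // arbitrary illustrative key
                // sortByKey forces a shuffle: the keying map becomes a ShuffleMapStage and the
                // consumer of the sorted data becomes a separate ResultStage, as in the log above.
                keyed.sortByKey().values().collect().forEach(System.out::println);
            }
        }
    }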
20:33:39.798 INFO  MemoryStore - Block broadcast_431 stored as values in memory (estimated size 520.4 KiB, free 1917.0 MiB)
20:33:39.799 INFO  MemoryStore - Block broadcast_431_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.9 MiB)
20:33:39.799 INFO  BlockManagerInfo - Added broadcast_431_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.5 MiB)
20:33:39.800 INFO  SparkContext - Created broadcast 431 from broadcast at DAGScheduler.scala:1580
20:33:39.800 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 215 (MapPartitionsRDD[1028] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:39.800 INFO  TaskSchedulerImpl - Adding task set 215.0 with 1 tasks resource profile 0
20:33:39.800 INFO  TaskSetManager - Starting task 0.0 in stage 215.0 (TID 271) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:39.800 INFO  Executor - Running task 0.0 in stage 215.0 (TID 271)
20:33:39.832 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:39.847 INFO  Executor - Finished task 0.0 in stage 215.0 (TID 271). 1148 bytes result sent to driver
20:33:39.847 INFO  TaskSetManager - Finished task 0.0 in stage 215.0 (TID 271) in 47 ms on localhost (executor driver) (1/1)
20:33:39.847 INFO  TaskSchedulerImpl - Removed TaskSet 215.0, whose tasks have all completed, from pool 
20:33:39.847 INFO  DAGScheduler - ShuffleMapStage 215 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:39.847 INFO  DAGScheduler - looking for newly runnable stages
20:33:39.847 INFO  DAGScheduler - running: HashSet()
20:33:39.847 INFO  DAGScheduler - waiting: HashSet(ResultStage 216)
20:33:39.847 INFO  DAGScheduler - failed: HashSet()
20:33:39.847 INFO  DAGScheduler - Submitting ResultStage 216 (MapPartitionsRDD[1033] at mapToPair at BamSink.java:91), which has no missing parents
20:33:39.854 INFO  MemoryStore - Block broadcast_432 stored as values in memory (estimated size 241.5 KiB, free 1916.6 MiB)
20:33:39.855 INFO  MemoryStore - Block broadcast_432_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.6 MiB)
20:33:39.855 INFO  BlockManagerInfo - Added broadcast_432_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.4 MiB)
20:33:39.855 INFO  SparkContext - Created broadcast 432 from broadcast at DAGScheduler.scala:1580
20:33:39.855 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 216 (MapPartitionsRDD[1033] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:39.855 INFO  TaskSchedulerImpl - Adding task set 216.0 with 1 tasks resource profile 0
20:33:39.856 INFO  TaskSetManager - Starting task 0.0 in stage 216.0 (TID 272) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:39.856 INFO  Executor - Running task 0.0 in stage 216.0 (TID 272)
20:33:39.861 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:39.861 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:39.872 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:39.872 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.872 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.872 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:39.872 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:39.872 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:39.873 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:39.874 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:39.875 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:39.878 INFO  StateChange - BLOCK* allocate blk_1073741871_1047, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/part-r-00000
20:33:39.879 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741871_1047 src: /127.0.0.1:34294 dest: /127.0.0.1:35765
20:33:39.881 INFO  clienttrace - src: /127.0.0.1:34294, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741871_1047, duration(ns): 1404065
20:33:39.881 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741871_1047, type=LAST_IN_PIPELINE terminating
20:33:39.882 INFO  FSNamesystem - BLOCK* blk_1073741871_1047 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/part-r-00000
20:33:40.283 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:40.284 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:40.285 INFO  StateChange - BLOCK* allocate blk_1073741872_1048, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.sbi
20:33:40.286 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741872_1048 src: /127.0.0.1:34296 dest: /127.0.0.1:35765
20:33:40.287 INFO  clienttrace - src: /127.0.0.1:34296, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741872_1048, duration(ns): 460644
20:33:40.287 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741872_1048, type=LAST_IN_PIPELINE terminating
20:33:40.288 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:40.290 INFO  StateChange - BLOCK* allocate blk_1073741873_1049, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.bai
20:33:40.291 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741873_1049 src: /127.0.0.1:34306 dest: /127.0.0.1:35765
20:33:40.292 INFO  clienttrace - src: /127.0.0.1:34306, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741873_1049, duration(ns): 485897
20:33:40.292 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741873_1049, type=LAST_IN_PIPELINE terminating
20:33:40.293 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:40.293 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0	dst=null	perm=null	proto=rpc
20:33:40.294 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0	dst=null	perm=null	proto=rpc
20:33:40.295 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000	dst=null	perm=null	proto=rpc
20:33:40.295 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/_temporary/attempt_202507152033391088243569022734065_1033_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:40.295 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033391088243569022734065_1033_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000
20:33:40.295 INFO  SparkHadoopMapRedUtil - attempt_202507152033391088243569022734065_1033_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:40.296 INFO  Executor - Finished task 0.0 in stage 216.0 (TID 272). 1858 bytes result sent to driver
20:33:40.296 INFO  TaskSetManager - Finished task 0.0 in stage 216.0 (TID 272) in 440 ms on localhost (executor driver) (1/1)
20:33:40.296 INFO  TaskSchedulerImpl - Removed TaskSet 216.0, whose tasks have all completed, from pool 
20:33:40.297 INFO  DAGScheduler - ResultStage 216 (runJob at SparkHadoopWriter.scala:83) finished in 0.449 s
20:33:40.297 INFO  DAGScheduler - Job 161 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:40.297 INFO  TaskSchedulerImpl - Killing all running tasks in stage 216: Stage finished
20:33:40.297 INFO  DAGScheduler - Job 161 finished: runJob at SparkHadoopWriter.scala:83, took 0.516991 s
20:33:40.297 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033391088243569022734065_1033.
20:33:40.298 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:40.298 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:40.299 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000	dst=null	perm=null	proto=rpc
20:33:40.299 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:40.300 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.300 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:40.301 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.301 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:40.302 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary/0/task_202507152033391088243569022734065_1033_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.302 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:40.303 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.304 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:40.305 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.spark-staging-1033	dst=null	perm=null	proto=rpc
20:33:40.305 INFO  SparkHadoopWriter - Write Job job_202507152033391088243569022734065_1033 committed. Elapsed time: 7 ms.
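The block of audit lines above is the standard FileOutputCommitter algorithm-version-1 flow: each task attempt writes under <output>.bam.parts/_temporary/0/_temporary/attempt_*/, task commit renames that attempt directory to a task directory, and job commit renames the task's files into the .parts directory and drops a _SUCCESS marker. A minimal sketch of the property that selects this behaviour; the value shown is simply the default this log reports, not a recommendation:

    import org.apache.hadoop.conf.Configuration;

    public class CommitterAlgorithmSketch {
        public static void main(String[] args) {
            // v1 commits by renames: attempt dir -> task dir -> final output dir, then _SUCCESS.
            // This matches the "File Output Committer Algorithm version is 1" lines above.
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
            System.out.println(conf.get("mapreduce.fileoutputcommitter.algorithm.version"));
        }
    }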
20:33:40.305 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.307 INFO  StateChange - BLOCK* allocate blk_1073741874_1050, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/header
20:33:40.308 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741874_1050 src: /127.0.0.1:34312 dest: /127.0.0.1:35765
20:33:40.309 INFO  clienttrace - src: /127.0.0.1:34312, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741874_1050, duration(ns): 539448
20:33:40.309 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741874_1050, type=LAST_IN_PIPELINE terminating
20:33:40.310 INFO  FSNamesystem - BLOCK* blk_1073741874_1050 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/header
20:33:40.711 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:40.713 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:40.714 INFO  StateChange - BLOCK* allocate blk_1073741875_1051, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/terminator
20:33:40.715 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741875_1051 src: /127.0.0.1:34318 dest: /127.0.0.1:35765
20:33:40.716 INFO  clienttrace - src: /127.0.0.1:34318, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741875_1051, duration(ns): 465142
20:33:40.716 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741875_1051, type=LAST_IN_PIPELINE terminating
20:33:40.717 INFO  FSNamesystem - BLOCK* blk_1073741875_1051 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/terminator
20:33:41.118 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.119 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.120 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.120 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.121 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam
20:33:41.121 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.121 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.122 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.123 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.123 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam done
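With the job committed, HadoopFileSystemWrapper stitches the header, the single part file, and the BGZF terminator into one BAM using HDFS concat, then renames the result onto the requested output path. concat moves blocks in the NameNode's metadata rather than copying bytes, which is why it completes in milliseconds here. A hedged sketch of that API (URI and paths are hypothetical; the real code concatenates into a freshly created placeholder file, as the audit lines show):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsConcatSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020"), new Configuration());
            Path parts = new Path("/user/runner/example.bam.parts");
            Path header = new Path(parts, "header");
            // concat appends the sources' blocks onto the target and removes the sources;
            // only filesystems that implement it (HDFS does) support the call.
            fs.concat(header, new Path[] {
                    new Path(parts, "part-r-00000"),
                    new Path(parts, "terminator")});
            fs.rename(header, new Path("/user/runner/example.bam"));
        }
    }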
20:33:41.124 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.124 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi
20:33:41.124 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.125 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.126 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:41.126 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:41.128 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:41.128 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:41.129 INFO  StateChange - BLOCK* allocate blk_1073741876_1052, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi
20:33:41.130 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741876_1052 src: /127.0.0.1:34332 dest: /127.0.0.1:35765
20:33:41.131 INFO  clienttrace - src: /127.0.0.1:34332, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741876_1052, duration(ns): 462076
20:33:41.131 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741876_1052, type=LAST_IN_PIPELINE terminating
20:33:41.132 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.132 INFO  IndexFileMerger - Done merging .sbi files
20:33:41.132 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai
20:33:41.133 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.133 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.134 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:41.135 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:41.136 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.136 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:41.137 INFO  StateChange - BLOCK* allocate blk_1073741877_1053, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai
20:33:41.138 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741877_1053 src: /127.0.0.1:34342 dest: /127.0.0.1:35765
20:33:41.139 INFO  clienttrace - src: /127.0.0.1:34342, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741877_1053, duration(ns): 422872
20:33:41.140 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741877_1053, type=LAST_IN_PIPELINE terminating
20:33:41.140 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.140 INFO  IndexFileMerger - Done merging .bai files
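The per-task .part-r-00000.sbi and .part-r-00000.bai files have now been merged into a single splitting index and BAM index beside the output, and the .parts scratch directory is deleted on the next audit line, leaving just the .bam, .bam.bai and .bam.sbi that the subsequent re-read opens. A small sketch that checks for that final layout (cluster URI and file name are hypothetical; the merge logic itself lives in the index mergers named in the log and is not reproduced here):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class OutputLayoutSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020"), new Configuration());
            for (String suffix : new String[] {"", ".bai", ".sbi"}) {
                Path p = new Path("/user/runner/example.bam" + suffix);
                System.out.println(p + (fs.exists(p) ? " exists" : " is missing"));
            }
        }
    }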
20:33:41.141 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.150 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.158 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.158 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.159 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.160 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.160 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.161 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.161 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.162 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.162 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.163 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.164 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:41.166 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.167 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.167 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:41.167 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.168 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.168 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.sbi	dst=null	perm=null	proto=rpc
20:33:41.169 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:41.169 INFO  MemoryStore - Block broadcast_433 stored as values in memory (estimated size 320.0 B, free 1916.6 MiB)
20:33:41.173 INFO  MemoryStore - Block broadcast_433_piece0 stored as bytes in memory (estimated size 233.0 B, free 1916.6 MiB)
20:33:41.173 INFO  BlockManagerInfo - Added broadcast_433_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.4 MiB)
20:33:41.174 INFO  SparkContext - Created broadcast 433 from broadcast at BamSource.java:104
20:33:41.174 INFO  BlockManagerInfo - Removed broadcast_431_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.6 MiB)
20:33:41.174 INFO  BlockManagerInfo - Removed broadcast_426_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.6 MiB)
20:33:41.174 INFO  BlockManagerInfo - Removed broadcast_432_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.6 MiB)
20:33:41.175 INFO  BlockManagerInfo - Removed broadcast_422_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.6 MiB)
20:33:41.175 INFO  MemoryStore - Block broadcast_434 stored as values in memory (estimated size 297.9 KiB, free 1917.3 MiB)
20:33:41.175 INFO  BlockManagerInfo - Removed broadcast_424_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.6 MiB)
20:33:41.176 INFO  BlockManagerInfo - Removed broadcast_417_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:41.176 INFO  BlockManagerInfo - Removed broadcast_430_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:41.177 INFO  BlockManagerInfo - Removed broadcast_423_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:41.177 INFO  BlockManagerInfo - Removed broadcast_425_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.9 MiB)
20:33:41.177 INFO  BlockManagerInfo - Removed broadcast_429_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.9 MiB)
20:33:41.178 INFO  BlockManagerInfo - Removed broadcast_428_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1920.0 MiB)
20:33:41.182 INFO  MemoryStore - Block broadcast_434_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:41.182 INFO  BlockManagerInfo - Added broadcast_434_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:41.183 INFO  SparkContext - Created broadcast 434 from newAPIHadoopFile at PathSplitSource.java:96
20:33:41.191 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.192 INFO  FileInputFormat - Total input files to process : 1
20:33:41.192 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.207 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:41.207 INFO  DAGScheduler - Got job 162 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:41.207 INFO  DAGScheduler - Final stage: ResultStage 217 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:41.207 INFO  DAGScheduler - Parents of final stage: List()
20:33:41.207 INFO  DAGScheduler - Missing parents: List()
20:33:41.208 INFO  DAGScheduler - Submitting ResultStage 217 (MapPartitionsRDD[1039] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:41.214 INFO  MemoryStore - Block broadcast_435 stored as values in memory (estimated size 148.2 KiB, free 1919.2 MiB)
20:33:41.214 INFO  MemoryStore - Block broadcast_435_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1919.1 MiB)
20:33:41.214 INFO  BlockManagerInfo - Added broadcast_435_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.8 MiB)
20:33:41.215 INFO  SparkContext - Created broadcast 435 from broadcast at DAGScheduler.scala:1580
20:33:41.215 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 217 (MapPartitionsRDD[1039] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:41.215 INFO  TaskSchedulerImpl - Adding task set 217.0 with 1 tasks resource profile 0
20:33:41.215 INFO  TaskSetManager - Starting task 0.0 in stage 217.0 (TID 273) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:41.216 INFO  Executor - Running task 0.0 in stage 217.0 (TID 273)
20:33:41.227 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam:0+237038
20:33:41.228 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.228 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.229 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.230 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.230 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.233 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.234 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.235 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:41.236 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:41.239 INFO  Executor - Finished task 0.0 in stage 217.0 (TID 273). 651526 bytes result sent to driver
20:33:41.241 INFO  TaskSetManager - Finished task 0.0 in stage 217.0 (TID 273) in 26 ms on localhost (executor driver) (1/1)
20:33:41.241 INFO  TaskSchedulerImpl - Removed TaskSet 217.0, whose tasks have all completed, from pool 
20:33:41.241 INFO  DAGScheduler - ResultStage 217 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.033 s
20:33:41.241 INFO  DAGScheduler - Job 162 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:41.241 INFO  TaskSchedulerImpl - Killing all running tasks in stage 217: Stage finished
20:33:41.241 INFO  DAGScheduler - Job 162 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.033879 s
20:33:41.251 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:41.251 INFO  DAGScheduler - Got job 163 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:41.251 INFO  DAGScheduler - Final stage: ResultStage 218 (count at ReadsSparkSinkUnitTest.java:185)
20:33:41.251 INFO  DAGScheduler - Parents of final stage: List()
20:33:41.251 INFO  DAGScheduler - Missing parents: List()
20:33:41.251 INFO  DAGScheduler - Submitting ResultStage 218 (MapPartitionsRDD[1021] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:41.268 INFO  MemoryStore - Block broadcast_436 stored as values in memory (estimated size 426.1 KiB, free 1918.7 MiB)
20:33:41.269 INFO  MemoryStore - Block broadcast_436_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1918.6 MiB)
20:33:41.269 INFO  BlockManagerInfo - Added broadcast_436_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.7 MiB)
20:33:41.270 INFO  SparkContext - Created broadcast 436 from broadcast at DAGScheduler.scala:1580
20:33:41.270 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 218 (MapPartitionsRDD[1021] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:41.270 INFO  TaskSchedulerImpl - Adding task set 218.0 with 1 tasks resource profile 0
20:33:41.270 INFO  TaskSetManager - Starting task 0.0 in stage 218.0 (TID 274) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:41.270 INFO  Executor - Running task 0.0 in stage 218.0 (TID 274)
20:33:41.305 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:41.315 INFO  Executor - Finished task 0.0 in stage 218.0 (TID 274). 989 bytes result sent to driver
20:33:41.315 INFO  TaskSetManager - Finished task 0.0 in stage 218.0 (TID 274) in 45 ms on localhost (executor driver) (1/1)
20:33:41.315 INFO  TaskSchedulerImpl - Removed TaskSet 218.0, whose tasks have all completed, from pool 
20:33:41.315 INFO  DAGScheduler - ResultStage 218 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.064 s
20:33:41.315 INFO  DAGScheduler - Job 163 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:41.315 INFO  TaskSchedulerImpl - Killing all running tasks in stage 218: Stage finished
20:33:41.316 INFO  DAGScheduler - Job 163 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.064953 s
20:33:41.319 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:41.319 INFO  DAGScheduler - Got job 164 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:41.319 INFO  DAGScheduler - Final stage: ResultStage 219 (count at ReadsSparkSinkUnitTest.java:185)
20:33:41.319 INFO  DAGScheduler - Parents of final stage: List()
20:33:41.319 INFO  DAGScheduler - Missing parents: List()
20:33:41.319 INFO  DAGScheduler - Submitting ResultStage 219 (MapPartitionsRDD[1039] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:41.325 INFO  MemoryStore - Block broadcast_437 stored as values in memory (estimated size 148.1 KiB, free 1918.4 MiB)
20:33:41.326 INFO  MemoryStore - Block broadcast_437_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.4 MiB)
20:33:41.326 INFO  BlockManagerInfo - Added broadcast_437_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:41.326 INFO  SparkContext - Created broadcast 437 from broadcast at DAGScheduler.scala:1580
20:33:41.327 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 219 (MapPartitionsRDD[1039] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:41.327 INFO  TaskSchedulerImpl - Adding task set 219.0 with 1 tasks resource profile 0
20:33:41.327 INFO  TaskSetManager - Starting task 0.0 in stage 219.0 (TID 275) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:41.327 INFO  Executor - Running task 0.0 in stage 219.0 (TID 275)
20:33:41.339 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam:0+237038
20:33:41.340 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.340 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam	dst=null	perm=null	proto=rpc
20:33:41.341 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.342 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.342 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_879b6c4c-b437-4373-9e9c-14819cb02a8f.bam.bai	dst=null	perm=null	proto=rpc
20:33:41.344 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:41.345 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.346 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:41.347 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:41.348 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:41.350 INFO  Executor - Finished task 0.0 in stage 219.0 (TID 275). 989 bytes result sent to driver
20:33:41.350 INFO  TaskSetManager - Finished task 0.0 in stage 219.0 (TID 275) in 23 ms on localhost (executor driver) (1/1)
20:33:41.350 INFO  TaskSchedulerImpl - Removed TaskSet 219.0, whose tasks have all completed, from pool 
20:33:41.350 INFO  DAGScheduler - ResultStage 219 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.030 s
20:33:41.350 INFO  DAGScheduler - Job 164 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:41.350 INFO  TaskSchedulerImpl - Killing all running tasks in stage 219: Stage finished
20:33:41.350 INFO  DAGScheduler - Job 164 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.031411 s
20:33:41.363 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:41.364 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.366 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.366 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:41.369 INFO  MemoryStore - Block broadcast_438 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:41.376 INFO  MemoryStore - Block broadcast_438_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:41.376 INFO  BlockManagerInfo - Added broadcast_438_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.6 MiB)
20:33:41.376 INFO  SparkContext - Created broadcast 438 from newAPIHadoopFile at PathSplitSource.java:96
20:33:41.399 INFO  MemoryStore - Block broadcast_439 stored as values in memory (estimated size 297.9 KiB, free 1917.7 MiB)
20:33:41.405 INFO  MemoryStore - Block broadcast_439_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.7 MiB)
20:33:41.405 INFO  BlockManagerInfo - Added broadcast_439_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:41.405 INFO  SparkContext - Created broadcast 439 from newAPIHadoopFile at PathSplitSource.java:96
20:33:41.425 INFO  FileInputFormat - Total input files to process : 1
20:33:41.428 INFO  MemoryStore - Block broadcast_440 stored as values in memory (estimated size 160.7 KiB, free 1917.5 MiB)
20:33:41.428 INFO  MemoryStore - Block broadcast_440_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.5 MiB)
20:33:41.429 INFO  BlockManagerInfo - Added broadcast_440_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:41.429 INFO  SparkContext - Created broadcast 440 from broadcast at ReadsSparkSink.java:133
20:33:41.430 INFO  MemoryStore - Block broadcast_441 stored as values in memory (estimated size 163.2 KiB, free 1917.4 MiB)
20:33:41.431 INFO  MemoryStore - Block broadcast_441_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.3 MiB)
20:33:41.431 INFO  BlockManagerInfo - Added broadcast_441_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:41.431 INFO  SparkContext - Created broadcast 441 from broadcast at BamSink.java:76
20:33:41.433 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.434 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:41.434 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:41.434 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:41.434 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:41.440 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:41.441 INFO  DAGScheduler - Registering RDD 1053 (mapToPair at SparkUtils.java:161) as input to shuffle 44
20:33:41.441 INFO  DAGScheduler - Got job 165 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:41.441 INFO  DAGScheduler - Final stage: ResultStage 221 (runJob at SparkHadoopWriter.scala:83)
20:33:41.441 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 220)
20:33:41.441 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 220)
20:33:41.441 INFO  DAGScheduler - Submitting ShuffleMapStage 220 (MapPartitionsRDD[1053] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:41.459 INFO  MemoryStore - Block broadcast_442 stored as values in memory (estimated size 520.4 KiB, free 1916.8 MiB)
20:33:41.460 INFO  MemoryStore - Block broadcast_442_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.7 MiB)
20:33:41.460 INFO  BlockManagerInfo - Added broadcast_442_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.4 MiB)
20:33:41.460 INFO  SparkContext - Created broadcast 442 from broadcast at DAGScheduler.scala:1580
20:33:41.461 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 220 (MapPartitionsRDD[1053] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:41.461 INFO  TaskSchedulerImpl - Adding task set 220.0 with 1 tasks resource profile 0
20:33:41.461 INFO  TaskSetManager - Starting task 0.0 in stage 220.0 (TID 276) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:41.461 INFO  Executor - Running task 0.0 in stage 220.0 (TID 276)
20:33:41.492 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:41.506 INFO  Executor - Finished task 0.0 in stage 220.0 (TID 276). 1148 bytes result sent to driver
20:33:41.507 INFO  TaskSetManager - Finished task 0.0 in stage 220.0 (TID 276) in 46 ms on localhost (executor driver) (1/1)
20:33:41.507 INFO  TaskSchedulerImpl - Removed TaskSet 220.0, whose tasks have all completed, from pool 
20:33:41.507 INFO  DAGScheduler - ShuffleMapStage 220 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:41.507 INFO  DAGScheduler - looking for newly runnable stages
20:33:41.507 INFO  DAGScheduler - running: HashSet()
20:33:41.507 INFO  DAGScheduler - waiting: HashSet(ResultStage 221)
20:33:41.507 INFO  DAGScheduler - failed: HashSet()
20:33:41.507 INFO  DAGScheduler - Submitting ResultStage 221 (MapPartitionsRDD[1058] at mapToPair at BamSink.java:91), which has no missing parents
20:33:41.516 INFO  MemoryStore - Block broadcast_443 stored as values in memory (estimated size 241.5 KiB, free 1916.4 MiB)
20:33:41.517 INFO  MemoryStore - Block broadcast_443_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1916.4 MiB)
20:33:41.517 INFO  BlockManagerInfo - Added broadcast_443_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.3 MiB)
20:33:41.517 INFO  SparkContext - Created broadcast 443 from broadcast at DAGScheduler.scala:1580
20:33:41.517 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 221 (MapPartitionsRDD[1058] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:41.517 INFO  TaskSchedulerImpl - Adding task set 221.0 with 1 tasks resource profile 0
20:33:41.518 INFO  TaskSetManager - Starting task 0.0 in stage 221.0 (TID 277) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:41.518 INFO  Executor - Running task 0.0 in stage 221.0 (TID 277)
20:33:41.522 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:41.522 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:41.533 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:41.533 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:41.533 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:41.533 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:41.533 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:41.533 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:41.534 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.535 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.536 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.538 INFO  StateChange - BLOCK* allocate blk_1073741878_1054, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/part-r-00000
20:33:41.540 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741878_1054 src: /127.0.0.1:34360 dest: /127.0.0.1:35765
20:33:41.542 INFO  clienttrace - src: /127.0.0.1:34360, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741878_1054, duration(ns): 1106528
20:33:41.542 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741878_1054, type=LAST_IN_PIPELINE terminating
20:33:41.543 INFO  FSNamesystem - BLOCK* blk_1073741878_1054 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/part-r-00000
20:33:41.944 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.945 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:41.946 INFO  StateChange - BLOCK* allocate blk_1073741879_1055, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.sbi
20:33:41.946 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741879_1055 src: /127.0.0.1:34372 dest: /127.0.0.1:35765
20:33:41.948 INFO  clienttrace - src: /127.0.0.1:34372, dest: /127.0.0.1:35765, bytes: 13492, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741879_1055, duration(ns): 462712
20:33:41.948 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741879_1055, type=LAST_IN_PIPELINE terminating
20:33:41.948 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.950 INFO  StateChange - BLOCK* allocate blk_1073741880_1056, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.bai
20:33:41.951 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741880_1056 src: /127.0.0.1:34382 dest: /127.0.0.1:35765
20:33:41.952 INFO  clienttrace - src: /127.0.0.1:34382, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741880_1056, duration(ns): 378196
20:33:41.952 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741880_1056, type=LAST_IN_PIPELINE terminating
20:33:41.953 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.953 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0	dst=null	perm=null	proto=rpc
20:33:41.954 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0	dst=null	perm=null	proto=rpc
20:33:41.954 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000	dst=null	perm=null	proto=rpc
20:33:41.955 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/_temporary/attempt_202507152033414303396610591826721_1058_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:41.955 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033414303396610591826721_1058_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000
20:33:41.955 INFO  SparkHadoopMapRedUtil - attempt_202507152033414303396610591826721_1058_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:41.959 INFO  Executor - Finished task 0.0 in stage 221.0 (TID 277). 1944 bytes result sent to driver
20:33:41.959 INFO  BlockManagerInfo - Removed broadcast_436_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.5 MiB)
20:33:41.960 INFO  TaskSetManager - Finished task 0.0 in stage 221.0 (TID 277) in 443 ms on localhost (executor driver) (1/1)
20:33:41.960 INFO  TaskSchedulerImpl - Removed TaskSet 221.0, whose tasks have all completed, from pool 
20:33:41.960 INFO  DAGScheduler - ResultStage 221 (runJob at SparkHadoopWriter.scala:83) finished in 0.452 s
20:33:41.960 INFO  DAGScheduler - Job 165 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:41.960 INFO  TaskSchedulerImpl - Killing all running tasks in stage 221: Stage finished
20:33:41.960 INFO  DAGScheduler - Job 165 finished: runJob at SparkHadoopWriter.scala:83, took 0.519881 s
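
Job 165 above is the write itself: a shuffle stage built by mapToPair at SparkUtils.java:161 (presumably keying/sorting the reads) feeding a result stage at BamSink.java:91 whose single task writes part-r-* files through Hadoop's FileOutputCommitter. The sketch below shows the same two-stage save pattern with plain Spark and toy data; the path and records are placeholders and this is not the test's actual code.

    import java.util.Arrays;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    public class TwoStageWriteSketch {
        public static void main(String[] args) {
            try (JavaSparkContext sc = new JavaSparkContext(
                    new SparkConf().setAppName("two-stage-write-sketch").setMaster("local[1]"))) {
                sc.parallelize(Arrays.asList("chr1:100", "chr1:50", "chr2:10"))
                  .mapToPair(read -> new Tuple2<>(read, 1))    // stage 1: mapToPair feeding the shuffle
                  .sortByKey()                                 // shuffle boundary (ordering by key)
                  .saveAsNewAPIHadoopFile(                     // stage 2: tasks write part-r-* under _temporary,
                          "/tmp/two-stage-write-sketch.parts", // then FileOutputCommitter renames them on commit
                          String.class, Integer.class, TextOutputFormat.class);
            }
        }
    }
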
20:33:41.961 INFO  BlockManagerInfo - Removed broadcast_435_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.5 MiB)
20:33:41.961 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033414303396610591826721_1058.
20:33:41.961 INFO  BlockManagerInfo - Removed broadcast_437_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.6 MiB)
20:33:41.961 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:41.962 INFO  BlockManagerInfo - Removed broadcast_433_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.6 MiB)
20:33:41.962 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:41.962 INFO  BlockManagerInfo - Removed broadcast_442_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.7 MiB)
20:33:41.963 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000	dst=null	perm=null	proto=rpc
20:33:41.963 INFO  BlockManagerInfo - Removed broadcast_434_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:41.963 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:41.963 INFO  BlockManagerInfo - Removed broadcast_427_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:41.964 INFO  BlockManagerInfo - Removed broadcast_439_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.9 MiB)
20:33:41.964 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.965 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:41.965 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.965 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:41.966 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary/0/task_202507152033414303396610591826721_1058_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.967 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:41.967 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.968 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.969 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.spark-staging-1058	dst=null	perm=null	proto=rpc
20:33:41.969 INFO  SparkHadoopWriter - Write Job job_202507152033414303396610591826721_1058 committed. Elapsed time: 8 ms.
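
The audit lines above trace FileOutputCommitter algorithm version 1: at task commit the attempt_* directory is renamed to a task_* directory, and at job commit the task outputs are renamed up into the .bam.parts directory and an empty _SUCCESS marker is written. The standalone sketch below drives the same committer directly, with assumed placeholder IDs and paths; no task output is produced here, so the commit calls only exercise the directory and marker handling.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
    import org.apache.hadoop.mapreduce.task.JobContextImpl;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class CommitterSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, 1); // as in the log
            Path out = new Path("/tmp/committer-sketch.parts");                        // placeholder output dir

            TaskAttemptID attempt = new TaskAttemptID("202507152033", 1, TaskType.REDUCE, 0, 0);
            TaskAttemptContext taskContext = new TaskAttemptContextImpl(conf, attempt);
            FileOutputCommitter committer = new FileOutputCommitter(out, taskContext);

            committer.setupJob(new JobContextImpl(conf, attempt.getJobID()));  // mkdirs .../_temporary/0
            // A real task would now write its part file under committer.getWorkPath().
            committer.commitTask(taskContext);                                 // rename attempt_* -> task_*
            committer.commitJob(new JobContextImpl(conf, attempt.getJobID())); // move task output up, write _SUCCESS
        }
    }
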
20:33:41.969 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.971 INFO  StateChange - BLOCK* allocate blk_1073741881_1057, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/header
20:33:41.972 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741881_1057 src: /127.0.0.1:34390 dest: /127.0.0.1:35765
20:33:41.973 INFO  clienttrace - src: /127.0.0.1:34390, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741881_1057, duration(ns): 453408
20:33:41.973 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741881_1057, type=LAST_IN_PIPELINE terminating
20:33:41.974 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:41.974 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:41.975 INFO  StateChange - BLOCK* allocate blk_1073741882_1058, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/terminator
20:33:41.976 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741882_1058 src: /127.0.0.1:34400 dest: /127.0.0.1:35765
20:33:41.977 INFO  clienttrace - src: /127.0.0.1:34400, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741882_1058, duration(ns): 345677
20:33:41.977 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741882_1058, type=LAST_IN_PIPELINE terminating
20:33:41.977 INFO  FSNamesystem - BLOCK* blk_1073741882_1058 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/terminator
20:33:42.378 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:42.379 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:42.380 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:42.381 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:42.381 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam
20:33:42.381 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:42.382 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:42.382 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:42.383 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:42.383 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam done
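
The final BAM is assembled without copying bytes: an empty output file is created, the header, part-r-00000 and terminator files are stitched onto it with HDFS concat, and the result is renamed to the .bam path. A minimal sketch of that sequence with the Hadoop FileSystem API follows, assuming an HDFS that (like the minicluster here) accepts concat into a freshly created empty target; the paths are placeholders, and the real merge is done by Disq's HadoopFileSystemWrapper.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatPartsSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());         // assumes fs.defaultFS points at HDFS
            Path parts = new Path("/user/runner/example.bam.parts");     // placeholder temp directory
            Path target = new Path(parts, "output");

            fs.create(target).close();                                   // empty target (cmd=create above)
            // concat moves the source blocks onto the target without rewriting them;
            // the source files cease to exist afterwards, matching the audit trail.
            fs.concat(target, new Path[] {
                    new Path(parts, "header"),
                    new Path(parts, "part-r-00000"),
                    new Path(parts, "terminator")
            });
            fs.rename(target, new Path("/user/runner/example.bam"));     // final rename to the .bam path
        }
    }
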
20:33:42.383 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:42.383 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi
20:33:42.384 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:42.384 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:42.385 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:42.385 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:42.386 WARN  DFSUtil - Unexpected value for data transfer bytes=13600 duration=0
20:33:42.387 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:42.388 INFO  StateChange - BLOCK* allocate blk_1073741883_1059, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi
20:33:42.388 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741883_1059 src: /127.0.0.1:34402 dest: /127.0.0.1:35765
20:33:42.390 INFO  clienttrace - src: /127.0.0.1:34402, dest: /127.0.0.1:35765, bytes: 13492, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741883_1059, duration(ns): 427909
20:33:42.390 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741883_1059, type=LAST_IN_PIPELINE terminating
20:33:42.390 INFO  FSNamesystem - BLOCK* blk_1073741883_1059 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi
20:33:42.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741872_1048 replica FinalizedReplica, blk_1073741872_1048, FINALIZED (getNumBytes()=212, getBytesOnDisk()=212, getVisibleLength()=212, getVolume()=/tmp/minicluster_storage11240959748026123074/data/data2, getBlockURI()=file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741872) for deletion
20:33:42.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741873_1049 replica FinalizedReplica, blk_1073741873_1049, FINALIZED (getNumBytes()=5472, getBytesOnDisk()=5472, getVisibleLength()=5472, getVolume()=/tmp/minicluster_storage11240959748026123074/data/data1, getBlockURI()=file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741873) for deletion
20:33:42.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741872_1048 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741872
20:33:42.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741873_1049 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741873
20:33:42.791 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:42.791 INFO  IndexFileMerger - Done merging .sbi files
20:33:42.791 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai
20:33:42.792 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:42.793 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:42.793 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:42.794 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:42.795 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:42.795 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:42.797 INFO  StateChange - BLOCK* allocate blk_1073741884_1060, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai
20:33:42.798 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741884_1060 src: /127.0.0.1:34412 dest: /127.0.0.1:35765
20:33:42.799 INFO  clienttrace - src: /127.0.0.1:34412, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741884_1060, duration(ns): 540659
20:33:42.799 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741884_1060, type=LAST_IN_PIPELINE terminating
20:33:42.800 INFO  FSNamesystem - BLOCK* blk_1073741884_1060 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai
20:33:43.201 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:43.201 INFO  IndexFileMerger - Done merging .bai files
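
The IndexFileMerger steps above list the hidden .part-r-00000.sbi/.bai files left in the temp directory, merge them into a single .bam.sbi and .bam.bai next to the output, and delete the per-part indexes. The skeleton below reproduces only the HDFS listing/copy/delete pattern visible in the audit lines, with placeholder paths; it is not Disq's merger, and a correct merge of real .sbi/.bai indexes must also combine headers and adjust virtual file offsets rather than simply concatenating bytes.

    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public class IndexCopySketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());
            Path partsDir = new Path("/user/runner/example.bam.parts");  // placeholder temp directory
            Path merged = new Path("/user/runner/example.bam.sbi");

            FileStatus[] parts = fs.listStatus(partsDir,
                    p -> p.getName().startsWith(".part-") && p.getName().endsWith(".sbi"));
            Arrays.sort(parts, (a, b) -> a.getPath().getName().compareTo(b.getPath().getName()));

            try (FSDataOutputStream out = fs.create(merged, true)) {
                for (FileStatus part : parts) {
                    try (FSDataInputStream in = fs.open(part.getPath())) {
                        IOUtils.copyBytes(in, out, 64 * 1024, false);    // stream each part into the output
                    }
                    fs.delete(part.getPath(), false);                    // per-part index removed, as in the log
                }
            }
        }
    }
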
20:33:43.202 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.parts	dst=null	perm=null	proto=rpc
20:33:43.211 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.219 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.219 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.220 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.221 WARN  DFSUtil - Unexpected value for data transfer bytes=13600 duration=0
20:33:43.221 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.222 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.222 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.223 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.223 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.224 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.224 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.227 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:43.228 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:43.228 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:43.228 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.229 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.229 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.sbi	dst=null	perm=null	proto=rpc
20:33:43.230 WARN  DFSUtil - Unexpected value for data transfer bytes=13600 duration=0
20:33:43.231 INFO  MemoryStore - Block broadcast_444 stored as values in memory (estimated size 13.3 KiB, free 1919.0 MiB)
20:33:43.231 INFO  MemoryStore - Block broadcast_444_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1919.0 MiB)
20:33:43.231 INFO  BlockManagerInfo - Added broadcast_444_piece0 in memory on localhost:45281 (size: 8.3 KiB, free: 1919.9 MiB)
20:33:43.232 INFO  SparkContext - Created broadcast 444 from broadcast at BamSource.java:104
20:33:43.233 INFO  MemoryStore - Block broadcast_445 stored as values in memory (estimated size 297.9 KiB, free 1918.7 MiB)
20:33:43.238 INFO  MemoryStore - Block broadcast_445_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.7 MiB)
20:33:43.239 INFO  BlockManagerInfo - Added broadcast_445_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:43.239 INFO  SparkContext - Created broadcast 445 from newAPIHadoopFile at PathSplitSource.java:96
20:33:43.248 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.248 INFO  FileInputFormat - Total input files to process : 1
20:33:43.248 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.263 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:43.263 INFO  DAGScheduler - Got job 166 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:43.263 INFO  DAGScheduler - Final stage: ResultStage 222 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:43.263 INFO  DAGScheduler - Parents of final stage: List()
20:33:43.263 INFO  DAGScheduler - Missing parents: List()
20:33:43.263 INFO  DAGScheduler - Submitting ResultStage 222 (MapPartitionsRDD[1064] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:43.269 INFO  MemoryStore - Block broadcast_446 stored as values in memory (estimated size 148.2 KiB, free 1918.5 MiB)
20:33:43.270 INFO  MemoryStore - Block broadcast_446_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.5 MiB)
20:33:43.270 INFO  BlockManagerInfo - Added broadcast_446_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.8 MiB)
20:33:43.270 INFO  SparkContext - Created broadcast 446 from broadcast at DAGScheduler.scala:1580
20:33:43.270 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 222 (MapPartitionsRDD[1064] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:43.270 INFO  TaskSchedulerImpl - Adding task set 222.0 with 1 tasks resource profile 0
20:33:43.271 INFO  TaskSetManager - Starting task 0.0 in stage 222.0 (TID 278) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:43.271 INFO  Executor - Running task 0.0 in stage 222.0 (TID 278)
20:33:43.282 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam:0+237038
20:33:43.283 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.284 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.284 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.285 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.285 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.287 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:43.289 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:43.289 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:43.290 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:43.291 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:43.293 INFO  Executor - Finished task 0.0 in stage 222.0 (TID 278). 651483 bytes result sent to driver
20:33:43.294 INFO  TaskSetManager - Finished task 0.0 in stage 222.0 (TID 278) in 23 ms on localhost (executor driver) (1/1)
20:33:43.294 INFO  TaskSchedulerImpl - Removed TaskSet 222.0, whose tasks have all completed, from pool 
20:33:43.294 INFO  DAGScheduler - ResultStage 222 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.031 s
20:33:43.294 INFO  DAGScheduler - Job 166 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:43.294 INFO  TaskSchedulerImpl - Killing all running tasks in stage 222: Stage finished
20:33:43.295 INFO  DAGScheduler - Job 166 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.031904 s
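
Job 166 reads the freshly written BAM back out of HDFS and collects the reads to the driver so the test can compare them with the input (collect at ReadsSparkSinkUnitTest.java:182). As a simpler, hedged stand-in for that round-trip check, the sketch below streams the merged file out of HDFS with htsjdk and counts records; it is not the test's code and the path is a placeholder.

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SamInputResource;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ReadBackSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());
            Path bam = new Path("/user/runner/example.bam");             // placeholder for the merged output

            long count = 0;
            try (SamReader reader = SamReaderFactory.makeDefault()
                    .open(SamInputResource.of(fs.open(bam)))) {          // sequential decode over an HDFS stream
                for (SAMRecord read : reader) {
                    count++;                                             // every decoded record counts
                }
            }
            System.out.println("records read back: " + count);
        }
    }
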
20:33:43.309 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:43.310 INFO  DAGScheduler - Got job 167 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:43.310 INFO  DAGScheduler - Final stage: ResultStage 223 (count at ReadsSparkSinkUnitTest.java:185)
20:33:43.310 INFO  DAGScheduler - Parents of final stage: List()
20:33:43.310 INFO  DAGScheduler - Missing parents: List()
20:33:43.310 INFO  DAGScheduler - Submitting ResultStage 223 (MapPartitionsRDD[1046] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:43.327 INFO  MemoryStore - Block broadcast_447 stored as values in memory (estimated size 426.1 KiB, free 1918.0 MiB)
20:33:43.328 INFO  MemoryStore - Block broadcast_447_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.9 MiB)
20:33:43.328 INFO  BlockManagerInfo - Added broadcast_447_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.6 MiB)
20:33:43.328 INFO  SparkContext - Created broadcast 447 from broadcast at DAGScheduler.scala:1580
20:33:43.328 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 223 (MapPartitionsRDD[1046] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:43.328 INFO  TaskSchedulerImpl - Adding task set 223.0 with 1 tasks resource profile 0
20:33:43.329 INFO  TaskSetManager - Starting task 0.0 in stage 223.0 (TID 279) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:43.329 INFO  Executor - Running task 0.0 in stage 223.0 (TID 279)
20:33:43.359 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:43.369 INFO  Executor - Finished task 0.0 in stage 223.0 (TID 279). 989 bytes result sent to driver
20:33:43.369 INFO  TaskSetManager - Finished task 0.0 in stage 223.0 (TID 279) in 40 ms on localhost (executor driver) (1/1)
20:33:43.369 INFO  TaskSchedulerImpl - Removed TaskSet 223.0, whose tasks have all completed, from pool 
20:33:43.370 INFO  DAGScheduler - ResultStage 223 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.059 s
20:33:43.370 INFO  DAGScheduler - Job 167 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:43.370 INFO  TaskSchedulerImpl - Killing all running tasks in stage 223: Stage finished
20:33:43.370 INFO  DAGScheduler - Job 167 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.060312 s
20:33:43.373 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:43.373 INFO  DAGScheduler - Got job 168 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:43.373 INFO  DAGScheduler - Final stage: ResultStage 224 (count at ReadsSparkSinkUnitTest.java:185)
20:33:43.373 INFO  DAGScheduler - Parents of final stage: List()
20:33:43.373 INFO  DAGScheduler - Missing parents: List()
20:33:43.373 INFO  DAGScheduler - Submitting ResultStage 224 (MapPartitionsRDD[1064] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:43.379 INFO  MemoryStore - Block broadcast_448 stored as values in memory (estimated size 148.1 KiB, free 1917.8 MiB)
20:33:43.380 INFO  MemoryStore - Block broadcast_448_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.7 MiB)
20:33:43.380 INFO  BlockManagerInfo - Added broadcast_448_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:43.380 INFO  SparkContext - Created broadcast 448 from broadcast at DAGScheduler.scala:1580
20:33:43.380 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 224 (MapPartitionsRDD[1064] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:43.380 INFO  TaskSchedulerImpl - Adding task set 224.0 with 1 tasks resource profile 0
20:33:43.381 INFO  TaskSetManager - Starting task 0.0 in stage 224.0 (TID 280) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:43.381 INFO  Executor - Running task 0.0 in stage 224.0 (TID 280)
20:33:43.393 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam:0+237038
20:33:43.393 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.394 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam	dst=null	perm=null	proto=rpc
20:33:43.395 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.395 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.396 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_44a2040e-8f5e-4026-932c-7f7f66c8f935.bam.bai	dst=null	perm=null	proto=rpc
20:33:43.397 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:43.399 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:43.401 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:43.402 INFO  Executor - Finished task 0.0 in stage 224.0 (TID 280). 989 bytes result sent to driver
20:33:43.402 INFO  TaskSetManager - Finished task 0.0 in stage 224.0 (TID 280) in 21 ms on localhost (executor driver) (1/1)
20:33:43.402 INFO  TaskSchedulerImpl - Removed TaskSet 224.0, whose tasks have all completed, from pool 
20:33:43.402 INFO  DAGScheduler - ResultStage 224 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.028 s
20:33:43.402 INFO  DAGScheduler - Job 168 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:43.403 INFO  TaskSchedulerImpl - Killing all running tasks in stage 224: Stage finished
20:33:43.403 INFO  DAGScheduler - Job 168 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.029580 s
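
Jobs 167 and 168 are two count() actions, one over the original local BAM and one over the BAM just read back from HDFS (both at ReadsSparkSinkUnitTest.java:185), which suggests the test's final check is that the record counts match. Below is a hedged reconstruction of that assertion in TestNG style; the class, method, and parameter names are assumptions, not the test's real identifiers.

    import org.apache.spark.api.java.JavaRDD;
    import org.testng.Assert;

    final class RoundTripCountCheckSketch {
        // Materializes each RDD once, which is why two separate count jobs appear back to back in the log.
        static <T> void assertSameReadCount(JavaRDD<T> originalReads, JavaRDD<T> roundTrippedReads) {
            Assert.assertEquals(roundTrippedReads.count(), originalReads.count());
        }
    }
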
20:33:43.414 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:43.415 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:43.416 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:43.416 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:43.420 INFO  MemoryStore - Block broadcast_449 stored as values in memory (estimated size 297.9 KiB, free 1917.4 MiB)
20:33:43.430 INFO  MemoryStore - Block broadcast_449_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.4 MiB)
20:33:43.430 INFO  BlockManagerInfo - Added broadcast_449_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:43.430 INFO  SparkContext - Created broadcast 449 from newAPIHadoopFile at PathSplitSource.java:96
20:33:43.466 INFO  MemoryStore - Block broadcast_450 stored as values in memory (estimated size 297.9 KiB, free 1917.1 MiB)
20:33:43.476 INFO  MemoryStore - Block broadcast_450_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.0 MiB)
20:33:43.476 INFO  BlockManagerInfo - Added broadcast_450_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:43.476 INFO  SparkContext - Created broadcast 450 from newAPIHadoopFile at PathSplitSource.java:96
20:33:43.497 INFO  FileInputFormat - Total input files to process : 1
20:33:43.499 INFO  MemoryStore - Block broadcast_451 stored as values in memory (estimated size 160.7 KiB, free 1916.9 MiB)
20:33:43.499 INFO  MemoryStore - Block broadcast_451_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.9 MiB)
20:33:43.500 INFO  BlockManagerInfo - Added broadcast_451_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:43.500 INFO  SparkContext - Created broadcast 451 from broadcast at ReadsSparkSink.java:133
20:33:43.500 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:43.501 INFO  MemoryStore - Block broadcast_452 stored as values in memory (estimated size 163.2 KiB, free 1916.7 MiB)
20:33:43.502 INFO  MemoryStore - Block broadcast_452_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.7 MiB)
20:33:43.502 INFO  BlockManagerInfo - Added broadcast_452_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.4 MiB)
20:33:43.502 INFO  SparkContext - Created broadcast 452 from broadcast at BamSink.java:76
20:33:43.504 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts	dst=null	perm=null	proto=rpc
20:33:43.504 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:43.504 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:43.504 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:43.505 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:43.515 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:43.515 INFO  DAGScheduler - Registering RDD 1078 (mapToPair at SparkUtils.java:161) as input to shuffle 45
20:33:43.515 INFO  DAGScheduler - Got job 169 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:43.515 INFO  DAGScheduler - Final stage: ResultStage 226 (runJob at SparkHadoopWriter.scala:83)
20:33:43.515 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 225)
20:33:43.516 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 225)
20:33:43.516 INFO  DAGScheduler - Submitting ShuffleMapStage 225 (MapPartitionsRDD[1078] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:43.533 INFO  MemoryStore - Block broadcast_453 stored as values in memory (estimated size 520.4 KiB, free 1916.2 MiB)
20:33:43.534 INFO  MemoryStore - Block broadcast_453_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.0 MiB)
20:33:43.535 INFO  BlockManagerInfo - Added broadcast_453_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.3 MiB)
20:33:43.535 INFO  SparkContext - Created broadcast 453 from broadcast at DAGScheduler.scala:1580
20:33:43.535 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 225 (MapPartitionsRDD[1078] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:43.535 INFO  TaskSchedulerImpl - Adding task set 225.0 with 1 tasks resource profile 0
20:33:43.535 INFO  TaskSetManager - Starting task 0.0 in stage 225.0 (TID 281) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:43.536 INFO  Executor - Running task 0.0 in stage 225.0 (TID 281)
20:33:43.565 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:43.580 INFO  Executor - Finished task 0.0 in stage 225.0 (TID 281). 1148 bytes result sent to driver
20:33:43.580 INFO  TaskSetManager - Finished task 0.0 in stage 225.0 (TID 281) in 45 ms on localhost (executor driver) (1/1)
20:33:43.580 INFO  TaskSchedulerImpl - Removed TaskSet 225.0, whose tasks have all completed, from pool 
20:33:43.580 INFO  DAGScheduler - ShuffleMapStage 225 (mapToPair at SparkUtils.java:161) finished in 0.064 s
20:33:43.580 INFO  DAGScheduler - looking for newly runnable stages
20:33:43.580 INFO  DAGScheduler - running: HashSet()
20:33:43.580 INFO  DAGScheduler - waiting: HashSet(ResultStage 226)
20:33:43.580 INFO  DAGScheduler - failed: HashSet()
20:33:43.581 INFO  DAGScheduler - Submitting ResultStage 226 (MapPartitionsRDD[1083] at mapToPair at BamSink.java:91), which has no missing parents
20:33:43.587 INFO  MemoryStore - Block broadcast_454 stored as values in memory (estimated size 241.5 KiB, free 1915.8 MiB)
20:33:43.591 INFO  BlockManagerInfo - Removed broadcast_448_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.3 MiB)
20:33:43.592 INFO  MemoryStore - Block broadcast_454_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1915.9 MiB)
20:33:43.592 INFO  BlockManagerInfo - Added broadcast_454_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.3 MiB)
20:33:43.592 INFO  BlockManagerInfo - Removed broadcast_447_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:43.592 INFO  SparkContext - Created broadcast 454 from broadcast at DAGScheduler.scala:1580
20:33:43.592 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 226 (MapPartitionsRDD[1083] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:43.592 INFO  BlockManagerInfo - Removed broadcast_440_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:43.592 INFO  TaskSchedulerImpl - Adding task set 226.0 with 1 tasks resource profile 0
20:33:43.593 INFO  BlockManagerInfo - Removed broadcast_441_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.4 MiB)
20:33:43.593 INFO  TaskSetManager - Starting task 0.0 in stage 226.0 (TID 282) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:43.593 INFO  Executor - Running task 0.0 in stage 226.0 (TID 282)
20:33:43.593 INFO  BlockManagerInfo - Removed broadcast_446_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.5 MiB)
20:33:43.594 INFO  BlockManagerInfo - Removed broadcast_444_piece0 on localhost:45281 in memory (size: 8.3 KiB, free: 1919.5 MiB)
20:33:43.595 INFO  BlockManagerInfo - Removed broadcast_450_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:43.595 INFO  BlockManagerInfo - Removed broadcast_443_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.6 MiB)
20:33:43.595 INFO  BlockManagerInfo - Removed broadcast_445_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:43.596 INFO  BlockManagerInfo - Removed broadcast_438_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:43.598 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:43.598 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:43.610 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:43.610 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:43.610 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:43.610 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:43.610 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:43.610 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:43.611 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:43.612 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:43.614 INFO  StateChange - BLOCK* allocate blk_1073741885_1061, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/part-r-00000
20:33:43.615 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741885_1061 src: /127.0.0.1:34430 dest: /127.0.0.1:35765
20:33:43.617 INFO  clienttrace - src: /127.0.0.1:34430, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741885_1061, duration(ns): 1205921
20:33:43.617 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741885_1061, type=LAST_IN_PIPELINE terminating
20:33:43.618 INFO  FSNamesystem - BLOCK* blk_1073741885_1061 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/part-r-00000
20:33:44.019 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:44.020 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:44.021 INFO  StateChange - BLOCK* allocate blk_1073741886_1062, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/.part-r-00000.bai
20:33:44.022 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741886_1062 src: /127.0.0.1:34436 dest: /127.0.0.1:35765
20:33:44.023 INFO  clienttrace - src: /127.0.0.1:34436, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741886_1062, duration(ns): 434452
20:33:44.023 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741886_1062, type=LAST_IN_PIPELINE terminating
20:33:44.024 INFO  FSNamesystem - BLOCK* blk_1073741886_1062 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/.part-r-00000.bai
20:33:44.425 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:44.426 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0	dst=null	perm=null	proto=rpc
20:33:44.426 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0	dst=null	perm=null	proto=rpc
20:33:44.427 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000	dst=null	perm=null	proto=rpc
20:33:44.427 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/_temporary/attempt_202507152033433746049003444225604_1083_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:44.427 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033433746049003444225604_1083_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000
20:33:44.427 INFO  SparkHadoopMapRedUtil - attempt_202507152033433746049003444225604_1083_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:44.428 INFO  Executor - Finished task 0.0 in stage 226.0 (TID 282). 1858 bytes result sent to driver
20:33:44.428 INFO  TaskSetManager - Finished task 0.0 in stage 226.0 (TID 282) in 835 ms on localhost (executor driver) (1/1)
20:33:44.428 INFO  TaskSchedulerImpl - Removed TaskSet 226.0, whose tasks have all completed, from pool 
20:33:44.428 INFO  DAGScheduler - ResultStage 226 (runJob at SparkHadoopWriter.scala:83) finished in 0.847 s
20:33:44.429 INFO  DAGScheduler - Job 169 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:44.429 INFO  TaskSchedulerImpl - Killing all running tasks in stage 226: Stage finished
20:33:44.429 INFO  DAGScheduler - Job 169 finished: runJob at SparkHadoopWriter.scala:83, took 0.913849 s
20:33:44.429 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033433746049003444225604_1083.
20:33:44.430 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:44.430 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts	dst=null	perm=null	proto=rpc
20:33:44.430 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000	dst=null	perm=null	proto=rpc
20:33:44.431 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:44.431 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:44.432 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:44.432 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary/0/task_202507152033433746049003444225604_1083_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:44.433 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:44.433 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:44.434 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:44.435 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.spark-staging-1083	dst=null	perm=null	proto=rpc
20:33:44.435 INFO  SparkHadoopWriter - Write Job job_202507152033433746049003444225604_1083 committed. Elapsed time: 5 ms.
20:33:44.435 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:44.437 INFO  StateChange - BLOCK* allocate blk_1073741887_1063, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/header
20:33:44.437 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741887_1063 src: /127.0.0.1:34448 dest: /127.0.0.1:35765
20:33:44.439 INFO  clienttrace - src: /127.0.0.1:34448, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741887_1063, duration(ns): 460224
20:33:44.439 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741887_1063, type=LAST_IN_PIPELINE terminating
20:33:44.440 INFO  FSNamesystem - BLOCK* blk_1073741887_1063 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/header
20:33:44.840 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:44.842 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:44.843 INFO  StateChange - BLOCK* allocate blk_1073741888_1064, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/terminator
20:33:44.844 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741888_1064 src: /127.0.0.1:34458 dest: /127.0.0.1:35765
20:33:44.845 INFO  clienttrace - src: /127.0.0.1:34458, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741888_1064, duration(ns): 417400
20:33:44.845 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741888_1064, type=LAST_IN_PIPELINE terminating
20:33:44.845 INFO  FSNamesystem - BLOCK* blk_1073741888_1064 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/terminator
20:33:45.246 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:45.247 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts	dst=null	perm=null	proto=rpc
20:33:45.248 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.249 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:45.249 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam
20:33:45.250 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.250 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.251 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.251 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.251 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam done
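[Editor's note] The audit trail just above (create "output", concat [header, part-r-00000, terminator] into it, delete any existing .bam, rename into place) shows how the sink assembles the final BAM from its .parts directory. The following is a minimal, hypothetical sketch of that assembly step using only the public Hadoop FileSystem API; the paths are placeholders and this is not GATK's HadoopFileSystemWrapper code.

    // Sketch only: mirrors the concat + rename sequence recorded in the audit log above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatPartsSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Assumes an HDFS-backed FileSystem; concat() is unsupported on the local FS.
            FileSystem fs = FileSystem.get(new java.net.URI("hdfs://localhost:44977"), conf);

            Path parts = new Path("/user/runner/example.bam.parts");   // hypothetical
            Path target = new Path(parts, "output");
            fs.create(target).close();                                  // empty target, as in the log

            // HDFS concat appends the sources onto the target in order; HDFS imposes
            // preconditions (same directory, compatible block sizes) for this to succeed.
            fs.concat(target, new Path[] {
                    new Path(parts, "header"),
                    new Path(parts, "part-r-00000"),
                    new Path(parts, "terminator")
            });

            // Replace any existing output and move the assembled file into place.
            Path finalBam = new Path("/user/runner/example.bam");       // hypothetical
            fs.delete(finalBam, false);
            fs.rename(target, finalBam);
        }
    }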
20:33:45.252 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.252 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai
20:33:45.252 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts	dst=null	perm=null	proto=rpc
20:33:45.253 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.253 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:45.254 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:45.255 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.255 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:45.256 INFO  StateChange - BLOCK* allocate blk_1073741889_1065, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai
20:33:45.257 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741889_1065 src: /127.0.0.1:34472 dest: /127.0.0.1:35765
20:33:45.258 INFO  clienttrace - src: /127.0.0.1:34472, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741889_1065, duration(ns): 465209
20:33:45.258 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741889_1065, type=LAST_IN_PIPELINE terminating
20:33:45.259 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:45.259 INFO  IndexFileMerger - Done merging .bai files
20:33:45.259 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.parts	dst=null	perm=null	proto=rpc
20:33:45.268 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.269 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.269 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.270 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.270 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.273 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.274 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.275 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.275 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.275 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.sbi	dst=null	perm=null	proto=rpc
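[Editor's note] The getfileinfo probes above show the read path checking for the merged .bam plus its companion .bai and (optionally) .sbi indexes before opening it. A tiny illustrative check of the same kind, using the Hadoop FileSystem API with hypothetical paths, is sketched below; it is not the test's actual code.

    // Hypothetical sketch of the existence probes visible in the audit lines above.
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class IndexProbeSketch {
        static void probe(FileSystem fs, Path bam) throws java.io.IOException {
            System.out.println("bam exists:  " + fs.exists(bam));
            System.out.println(".bai exists: " + fs.exists(bam.suffix(".bai")));  // BAM index
            System.out.println(".sbi exists: " + fs.exists(bam.suffix(".sbi")));  // splitting index, optional
        }
    }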
20:33:45.276 INFO  MemoryStore - Block broadcast_455 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:45.282 INFO  MemoryStore - Block broadcast_455_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:45.283 INFO  BlockManagerInfo - Added broadcast_455_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:45.283 INFO  SparkContext - Created broadcast 455 from newAPIHadoopFile at PathSplitSource.java:96
20:33:45.303 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.303 INFO  FileInputFormat - Total input files to process : 1
20:33:45.304 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.339 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:45.339 INFO  DAGScheduler - Got job 170 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:45.339 INFO  DAGScheduler - Final stage: ResultStage 227 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:45.339 INFO  DAGScheduler - Parents of final stage: List()
20:33:45.339 INFO  DAGScheduler - Missing parents: List()
20:33:45.340 INFO  DAGScheduler - Submitting ResultStage 227 (MapPartitionsRDD[1090] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:45.356 INFO  MemoryStore - Block broadcast_456 stored as values in memory (estimated size 426.2 KiB, free 1917.6 MiB)
20:33:45.357 INFO  MemoryStore - Block broadcast_456_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.4 MiB)
20:33:45.358 INFO  BlockManagerInfo - Added broadcast_456_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:45.358 INFO  SparkContext - Created broadcast 456 from broadcast at DAGScheduler.scala:1580
20:33:45.358 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 227 (MapPartitionsRDD[1090] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:45.358 INFO  TaskSchedulerImpl - Adding task set 227.0 with 1 tasks resource profile 0
20:33:45.358 INFO  TaskSetManager - Starting task 0.0 in stage 227.0 (TID 283) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:45.359 INFO  Executor - Running task 0.0 in stage 227.0 (TID 283)
20:33:45.388 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam:0+237038
20:33:45.389 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.390 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.391 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.391 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.392 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.392 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.393 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.393 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.395 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.397 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.397 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.398 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.398 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.399 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741879_1055 replica FinalizedReplica, blk_1073741879_1055, FINALIZED
  getNumBytes()     = 13492
  getBytesOnDisk()  = 13492
  getVisibleLength()= 13492
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741879 for deletion
20:33:45.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741880_1056 replica FinalizedReplica, blk_1073741880_1056, FINALIZED
  getNumBytes()     = 5472
  getBytesOnDisk()  = 5472
  getVisibleLength()= 5472
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741880 for deletion
20:33:45.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741879_1055 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741879
20:33:45.403 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741880_1056 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741880
20:33:45.403 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.404 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.405 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.405 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.406 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.407 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.407 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.408 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.409 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.409 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.410 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.411 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.411 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.412 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.412 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.413 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.413 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.414 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.415 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.415 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.416 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.417 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.418 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.419 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.419 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.420 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.421 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.421 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.422 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.423 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.424 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.424 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.426 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.427 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.429 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.430 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.432 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.433 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.435 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.435 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.436 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.437 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.438 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.439 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.439 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.440 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.441 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.443 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.443 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.444 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.445 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.446 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.447 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.448 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.449 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.450 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.450 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.450 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.451 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.451 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.452 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.452 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.454 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.455 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.456 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.457 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.457 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:45.459 INFO  Executor - Finished task 0.0 in stage 227.0 (TID 283). 651483 bytes result sent to driver
20:33:45.460 INFO  TaskSetManager - Finished task 0.0 in stage 227.0 (TID 283) in 102 ms on localhost (executor driver) (1/1)
20:33:45.461 INFO  TaskSchedulerImpl - Removed TaskSet 227.0, whose tasks have all completed, from pool 
20:33:45.461 INFO  DAGScheduler - ResultStage 227 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.121 s
20:33:45.461 INFO  DAGScheduler - Job 170 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:45.461 INFO  TaskSchedulerImpl - Killing all running tasks in stage 227: Stage finished
20:33:45.461 INFO  DAGScheduler - Job 170 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.121904 s
20:33:45.470 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:45.471 INFO  DAGScheduler - Got job 171 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:45.471 INFO  DAGScheduler - Final stage: ResultStage 228 (count at ReadsSparkSinkUnitTest.java:185)
20:33:45.471 INFO  DAGScheduler - Parents of final stage: List()
20:33:45.471 INFO  DAGScheduler - Missing parents: List()
20:33:45.471 INFO  DAGScheduler - Submitting ResultStage 228 (MapPartitionsRDD[1071] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:45.491 INFO  MemoryStore - Block broadcast_457 stored as values in memory (estimated size 426.1 KiB, free 1917.0 MiB)
20:33:45.492 INFO  MemoryStore - Block broadcast_457_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.9 MiB)
20:33:45.492 INFO  BlockManagerInfo - Added broadcast_457_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.4 MiB)
20:33:45.492 INFO  SparkContext - Created broadcast 457 from broadcast at DAGScheduler.scala:1580
20:33:45.492 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 228 (MapPartitionsRDD[1071] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:45.492 INFO  TaskSchedulerImpl - Adding task set 228.0 with 1 tasks resource profile 0
20:33:45.493 INFO  TaskSetManager - Starting task 0.0 in stage 228.0 (TID 284) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:45.493 INFO  Executor - Running task 0.0 in stage 228.0 (TID 284)
20:33:45.522 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:45.532 INFO  Executor - Finished task 0.0 in stage 228.0 (TID 284). 989 bytes result sent to driver
20:33:45.532 INFO  TaskSetManager - Finished task 0.0 in stage 228.0 (TID 284) in 39 ms on localhost (executor driver) (1/1)
20:33:45.532 INFO  TaskSchedulerImpl - Removed TaskSet 228.0, whose tasks have all completed, from pool 
20:33:45.532 INFO  DAGScheduler - ResultStage 228 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.061 s
20:33:45.532 INFO  DAGScheduler - Job 171 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:45.532 INFO  TaskSchedulerImpl - Killing all running tasks in stage 228: Stage finished
20:33:45.533 INFO  DAGScheduler - Job 171 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061997 s
20:33:45.537 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:45.537 INFO  DAGScheduler - Got job 172 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:45.537 INFO  DAGScheduler - Final stage: ResultStage 229 (count at ReadsSparkSinkUnitTest.java:185)
20:33:45.537 INFO  DAGScheduler - Parents of final stage: List()
20:33:45.537 INFO  DAGScheduler - Missing parents: List()
20:33:45.538 INFO  DAGScheduler - Submitting ResultStage 229 (MapPartitionsRDD[1090] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:45.554 INFO  MemoryStore - Block broadcast_458 stored as values in memory (estimated size 426.1 KiB, free 1916.5 MiB)
20:33:45.556 INFO  MemoryStore - Block broadcast_458_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.3 MiB)
20:33:45.556 INFO  BlockManagerInfo - Added broadcast_458_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:45.556 INFO  SparkContext - Created broadcast 458 from broadcast at DAGScheduler.scala:1580
20:33:45.556 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 229 (MapPartitionsRDD[1090] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:45.556 INFO  TaskSchedulerImpl - Adding task set 229.0 with 1 tasks resource profile 0
20:33:45.557 INFO  TaskSetManager - Starting task 0.0 in stage 229.0 (TID 285) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:45.557 INFO  Executor - Running task 0.0 in stage 229.0 (TID 285)
20:33:45.586 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam:0+237038
20:33:45.587 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.588 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.589 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.589 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.590 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.590 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.591 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.591 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.594 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.595 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.595 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.596 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.596 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.600 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.601 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.601 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.602 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.603 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.603 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.604 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.604 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.605 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.606 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.606 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.607 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.608 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.608 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.609 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.610 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.611 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.612 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.612 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.613 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.614 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.615 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.615 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.616 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.616 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.617 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.618 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.618 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.619 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.619 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.620 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.620 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.621 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.622 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.622 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.623 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.624 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.625 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.625 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.626 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.627 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.627 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.628 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.629 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.629 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.630 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.631 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.632 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.634 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.635 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.637 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.638 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.639 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.639 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.640 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.641 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.642 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:45.642 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.642 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.643 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.644 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam	dst=null	perm=null	proto=rpc
20:33:45.645 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.645 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.645 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_1c4d38cb-1ce4-45c1-af5b-531c14548507.bam.bai	dst=null	perm=null	proto=rpc
20:33:45.647 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:45.649 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.650 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:45.651 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:45.653 INFO  Executor - Finished task 0.0 in stage 229.0 (TID 285). 989 bytes result sent to driver
20:33:45.654 INFO  TaskSetManager - Finished task 0.0 in stage 229.0 (TID 285) in 97 ms on localhost (executor driver) (1/1)
20:33:45.654 INFO  TaskSchedulerImpl - Removed TaskSet 229.0, whose tasks have all completed, from pool 
20:33:45.654 INFO  DAGScheduler - ResultStage 229 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.116 s
20:33:45.654 INFO  DAGScheduler - Job 172 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:45.654 INFO  TaskSchedulerImpl - Killing all running tasks in stage 229: Stage finished
20:33:45.654 INFO  DAGScheduler - Job 172 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.117031 s
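[Editor's note] Jobs 170-172 (collect at ReadsSparkSinkUnitTest.java:182, count at :185) read the freshly written HDFS BAM back and compare it against the original local input. A minimal sketch of that round-trip check is shown below; the helper, variable names, and the count-only comparison are assumptions for illustration, not the test's actual assertions.

    // Hypothetical round-trip check: the written reads are collected and counted
    // against the original RDD, as the collect/count jobs in the log above suggest.
    import org.apache.spark.api.java.JavaRDD;
    import org.testng.Assert;
    import java.util.List;

    final class RoundTripCheckSketch {
        static <R> void assertRoundTrip(JavaRDD<R> original, JavaRDD<R> written) {
            List<R> writtenReads = written.collect();               // materialise written reads
            Assert.assertEquals(written.count(), original.count()); // same number of reads
            Assert.assertEquals((long) writtenReads.size(), original.count());
        }
    }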
20:33:45.667 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:45.668 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.669 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:45.669 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:45.672 INFO  MemoryStore - Block broadcast_459 stored as values in memory (estimated size 297.9 KiB, free 1916.0 MiB)
20:33:45.678 INFO  MemoryStore - Block broadcast_459_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.0 MiB)
20:33:45.678 INFO  BlockManagerInfo - Added broadcast_459_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.2 MiB)
20:33:45.678 INFO  SparkContext - Created broadcast 459 from newAPIHadoopFile at PathSplitSource.java:96
20:33:45.699 INFO  MemoryStore - Block broadcast_460 stored as values in memory (estimated size 297.9 KiB, free 1915.7 MiB)
20:33:45.705 INFO  MemoryStore - Block broadcast_460_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1915.6 MiB)
20:33:45.705 INFO  BlockManagerInfo - Added broadcast_460_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.1 MiB)
20:33:45.706 INFO  SparkContext - Created broadcast 460 from newAPIHadoopFile at PathSplitSource.java:96
20:33:45.725 INFO  FileInputFormat - Total input files to process : 1
20:33:45.727 INFO  MemoryStore - Block broadcast_461 stored as values in memory (estimated size 160.7 KiB, free 1915.5 MiB)
20:33:45.727 INFO  MemoryStore - Block broadcast_461_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.5 MiB)
20:33:45.728 INFO  BlockManagerInfo - Added broadcast_461_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:45.728 INFO  SparkContext - Created broadcast 461 from broadcast at ReadsSparkSink.java:133
20:33:45.728 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:45.729 INFO  MemoryStore - Block broadcast_462 stored as values in memory (estimated size 163.2 KiB, free 1915.3 MiB)
20:33:45.730 INFO  MemoryStore - Block broadcast_462_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.3 MiB)
20:33:45.730 INFO  BlockManagerInfo - Added broadcast_462_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:45.730 INFO  SparkContext - Created broadcast 462 from broadcast at BamSink.java:76
20:33:45.732 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts	dst=null	perm=null	proto=rpc
20:33:45.732 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:45.732 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:45.732 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:45.733 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
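[Editor's note] The "File Output Committer Algorithm version is 1" lines above correspond to the two-phase rename protocol visible throughout this log: task attempts write under _temporary/0/_temporary/attempt_*, are renamed to a task directory on task commit, and are renamed again into the final .parts directory at job commit. The snippet below only illustrates how that algorithm version is selected through standard Hadoop/Spark configuration; it is generic configuration, not GATK-specific code.

    // Illustrative configuration sketch for the committer algorithm logged above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.spark.SparkConf;

    final class CommitterConfigSketch {
        static Configuration hadoopConf() {
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1); // v1: rename at task and job commit
            return conf;
        }

        static SparkConf sparkConf() {
            // Equivalent setting when going through Spark's Hadoop integration.
            return new SparkConf()
                    .set("spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version", "1");
        }
    }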
20:33:45.739 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:45.739 INFO  DAGScheduler - Registering RDD 1104 (mapToPair at SparkUtils.java:161) as input to shuffle 46
20:33:45.739 INFO  DAGScheduler - Got job 173 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:45.739 INFO  DAGScheduler - Final stage: ResultStage 231 (runJob at SparkHadoopWriter.scala:83)
20:33:45.739 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 230)
20:33:45.739 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 230)
20:33:45.739 INFO  DAGScheduler - Submitting ShuffleMapStage 230 (MapPartitionsRDD[1104] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:45.757 INFO  MemoryStore - Block broadcast_463 stored as values in memory (estimated size 520.4 KiB, free 1914.8 MiB)
20:33:45.758 INFO  MemoryStore - Block broadcast_463_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1914.6 MiB)
20:33:45.758 INFO  BlockManagerInfo - Added broadcast_463_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1918.9 MiB)
20:33:45.758 INFO  SparkContext - Created broadcast 463 from broadcast at DAGScheduler.scala:1580
20:33:45.758 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 230 (MapPartitionsRDD[1104] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:45.758 INFO  TaskSchedulerImpl - Adding task set 230.0 with 1 tasks resource profile 0
20:33:45.759 INFO  TaskSetManager - Starting task 0.0 in stage 230.0 (TID 286) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:45.759 INFO  Executor - Running task 0.0 in stage 230.0 (TID 286)
20:33:45.782 INFO  BlockManagerInfo - Removed broadcast_456_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.1 MiB)
20:33:45.783 INFO  BlockManagerInfo - Removed broadcast_451_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.1 MiB)
20:33:45.783 INFO  BlockManagerInfo - Removed broadcast_454_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.2 MiB)
20:33:45.783 INFO  BlockManagerInfo - Removed broadcast_452_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.2 MiB)
20:33:45.784 INFO  BlockManagerInfo - Removed broadcast_457_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.3 MiB)
20:33:45.785 INFO  BlockManagerInfo - Removed broadcast_460_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:45.786 INFO  BlockManagerInfo - Removed broadcast_449_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:45.786 INFO  BlockManagerInfo - Removed broadcast_453_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.6 MiB)
20:33:45.786 INFO  BlockManagerInfo - Removed broadcast_455_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:45.787 INFO  BlockManagerInfo - Removed broadcast_458_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.8 MiB)
20:33:45.796 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:45.813 INFO  Executor - Finished task 0.0 in stage 230.0 (TID 286). 1191 bytes result sent to driver
20:33:45.813 INFO  TaskSetManager - Finished task 0.0 in stage 230.0 (TID 286) in 54 ms on localhost (executor driver) (1/1)
20:33:45.813 INFO  TaskSchedulerImpl - Removed TaskSet 230.0, whose tasks have all completed, from pool 
20:33:45.813 INFO  DAGScheduler - ShuffleMapStage 230 (mapToPair at SparkUtils.java:161) finished in 0.073 s
20:33:45.813 INFO  DAGScheduler - looking for newly runnable stages
20:33:45.813 INFO  DAGScheduler - running: HashSet()
20:33:45.813 INFO  DAGScheduler - waiting: HashSet(ResultStage 231)
20:33:45.813 INFO  DAGScheduler - failed: HashSet()
20:33:45.813 INFO  DAGScheduler - Submitting ResultStage 231 (MapPartitionsRDD[1109] at mapToPair at BamSink.java:91), which has no missing parents
20:33:45.820 INFO  MemoryStore - Block broadcast_464 stored as values in memory (estimated size 241.5 KiB, free 1918.4 MiB)
20:33:45.821 INFO  MemoryStore - Block broadcast_464_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1918.4 MiB)
20:33:45.821 INFO  BlockManagerInfo - Added broadcast_464_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.7 MiB)
20:33:45.821 INFO  SparkContext - Created broadcast 464 from broadcast at DAGScheduler.scala:1580
20:33:45.821 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 231 (MapPartitionsRDD[1109] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:45.821 INFO  TaskSchedulerImpl - Adding task set 231.0 with 1 tasks resource profile 0
20:33:45.822 INFO  TaskSetManager - Starting task 0.0 in stage 231.0 (TID 287) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:45.822 INFO  Executor - Running task 0.0 in stage 231.0 (TID 287)
20:33:45.826 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:45.826 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:45.837 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:45.837 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:45.837 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:45.837 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:45.837 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:45.837 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:45.838 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.839 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:45.841 INFO  StateChange - BLOCK* allocate blk_1073741890_1066, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/part-r-00000
20:33:45.843 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741890_1066 src: /127.0.0.1:35130 dest: /127.0.0.1:35765
20:33:45.844 INFO  clienttrace - src: /127.0.0.1:35130, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741890_1066, duration(ns): 1016921
20:33:45.844 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741890_1066, type=LAST_IN_PIPELINE terminating
20:33:45.845 INFO  FSNamesystem - BLOCK* blk_1073741890_1066 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/part-r-00000
20:33:46.246 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.246 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:46.247 INFO  StateChange - BLOCK* allocate blk_1073741891_1067, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/.part-r-00000.sbi
20:33:46.248 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741891_1067 src: /127.0.0.1:35144 dest: /127.0.0.1:35765
20:33:46.249 INFO  clienttrace - src: /127.0.0.1:35144, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741891_1067, duration(ns): 416467
20:33:46.249 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741891_1067, type=LAST_IN_PIPELINE terminating
20:33:46.250 INFO  FSNamesystem - BLOCK* blk_1073741891_1067 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/.part-r-00000.sbi
20:33:46.650 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.651 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0	dst=null	perm=null	proto=rpc
20:33:46.652 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0	dst=null	perm=null	proto=rpc
20:33:46.652 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000	dst=null	perm=null	proto=rpc
20:33:46.653 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/_temporary/attempt_20250715203345303861849114191088_1109_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:46.653 INFO  FileOutputCommitter - Saved output of task 'attempt_20250715203345303861849114191088_1109_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000
20:33:46.653 INFO  SparkHadoopMapRedUtil - attempt_20250715203345303861849114191088_1109_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:46.654 INFO  Executor - Finished task 0.0 in stage 231.0 (TID 287). 1858 bytes result sent to driver
20:33:46.654 INFO  TaskSetManager - Finished task 0.0 in stage 231.0 (TID 287) in 832 ms on localhost (executor driver) (1/1)
20:33:46.654 INFO  TaskSchedulerImpl - Removed TaskSet 231.0, whose tasks have all completed, from pool 
20:33:46.654 INFO  DAGScheduler - ResultStage 231 (runJob at SparkHadoopWriter.scala:83) finished in 0.840 s
20:33:46.654 INFO  DAGScheduler - Job 173 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:46.654 INFO  TaskSchedulerImpl - Killing all running tasks in stage 231: Stage finished
20:33:46.654 INFO  DAGScheduler - Job 173 finished: runJob at SparkHadoopWriter.scala:83, took 0.915592 s
20:33:46.655 INFO  SparkHadoopWriter - Start to commit write Job job_20250715203345303861849114191088_1109.
20:33:46.655 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:46.656 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts	dst=null	perm=null	proto=rpc
20:33:46.656 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000	dst=null	perm=null	proto=rpc
20:33:46.656 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:46.657 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.658 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:46.658 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary/0/task_20250715203345303861849114191088_1109_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.659 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:46.659 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.660 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.661 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.spark-staging-1109	dst=null	perm=null	proto=rpc
20:33:46.661 INFO  SparkHadoopWriter - Write Job job_20250715203345303861849114191088_1109 committed. Elapsed time: 6 ms.
20:33:46.661 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.662 INFO  StateChange - BLOCK* allocate blk_1073741892_1068, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/header
20:33:46.663 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741892_1068 src: /127.0.0.1:35154 dest: /127.0.0.1:35765
20:33:46.664 INFO  clienttrace - src: /127.0.0.1:35154, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741892_1068, duration(ns): 410364
20:33:46.664 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741892_1068, type=LAST_IN_PIPELINE terminating
20:33:46.665 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.666 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.667 INFO  StateChange - BLOCK* allocate blk_1073741893_1069, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/terminator
20:33:46.667 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741893_1069 src: /127.0.0.1:35162 dest: /127.0.0.1:35765
20:33:46.668 INFO  clienttrace - src: /127.0.0.1:35162, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741893_1069, duration(ns): 341288
20:33:46.668 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741893_1069, type=LAST_IN_PIPELINE terminating
20:33:46.669 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.669 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts	dst=null	perm=null	proto=rpc
20:33:46.670 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.670 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:46.671 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam
20:33:46.671 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.671 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:46.672 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:46.672 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.672 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam done
20:33:46.672 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:46.673 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi
20:33:46.673 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts	dst=null	perm=null	proto=rpc
20:33:46.673 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:46.674 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:46.675 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:46.676 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:46.676 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:46.677 INFO  StateChange - BLOCK* allocate blk_1073741894_1070, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi
20:33:46.678 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741894_1070 src: /127.0.0.1:35174 dest: /127.0.0.1:35765
20:33:46.679 INFO  clienttrace - src: /127.0.0.1:35174, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741894_1070, duration(ns): 440529
20:33:46.679 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741894_1070, type=LAST_IN_PIPELINE terminating
20:33:46.679 INFO  FSNamesystem - BLOCK* blk_1073741894_1070 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi
20:33:47.080 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:47.080 INFO  IndexFileMerger - Done merging .sbi files
20:33:47.081 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.parts	dst=null	perm=null	proto=rpc
20:33:47.090 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.090 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.091 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.092 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:47.092 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.093 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.093 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.094 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.094 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.bai	dst=null	perm=null	proto=rpc
20:33:47.095 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bai	dst=null	perm=null	proto=rpc
20:33:47.096 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:47.097 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:47.097 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.098 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.098 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.sbi	dst=null	perm=null	proto=rpc
20:33:47.099 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:47.099 INFO  MemoryStore - Block broadcast_465 stored as values in memory (estimated size 320.0 B, free 1918.4 MiB)
20:33:47.099 INFO  MemoryStore - Block broadcast_465_piece0 stored as bytes in memory (estimated size 233.0 B, free 1918.4 MiB)
20:33:47.099 INFO  BlockManagerInfo - Added broadcast_465_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.7 MiB)
20:33:47.100 INFO  SparkContext - Created broadcast 465 from broadcast at BamSource.java:104
20:33:47.100 INFO  MemoryStore - Block broadcast_466 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:47.107 INFO  MemoryStore - Block broadcast_466_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:47.107 INFO  BlockManagerInfo - Added broadcast_466_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:47.107 INFO  SparkContext - Created broadcast 466 from newAPIHadoopFile at PathSplitSource.java:96
20:33:47.116 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.116 INFO  FileInputFormat - Total input files to process : 1
20:33:47.116 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.131 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:47.131 INFO  DAGScheduler - Got job 174 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:47.131 INFO  DAGScheduler - Final stage: ResultStage 232 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:47.131 INFO  DAGScheduler - Parents of final stage: List()
20:33:47.131 INFO  DAGScheduler - Missing parents: List()
20:33:47.131 INFO  DAGScheduler - Submitting ResultStage 232 (MapPartitionsRDD[1115] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:47.137 INFO  MemoryStore - Block broadcast_467 stored as values in memory (estimated size 148.2 KiB, free 1917.9 MiB)
20:33:47.138 INFO  MemoryStore - Block broadcast_467_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.8 MiB)
20:33:47.138 INFO  BlockManagerInfo - Added broadcast_467_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:47.138 INFO  SparkContext - Created broadcast 467 from broadcast at DAGScheduler.scala:1580
20:33:47.138 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 232 (MapPartitionsRDD[1115] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:47.138 INFO  TaskSchedulerImpl - Adding task set 232.0 with 1 tasks resource profile 0
20:33:47.139 INFO  TaskSetManager - Starting task 0.0 in stage 232.0 (TID 288) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:47.139 INFO  Executor - Running task 0.0 in stage 232.0 (TID 288)
20:33:47.150 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam:0+237038
20:33:47.151 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.151 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.152 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.bai	dst=null	perm=null	proto=rpc
20:33:47.152 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bai	dst=null	perm=null	proto=rpc
20:33:47.156 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:47.156 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:47.158 INFO  Executor - Finished task 0.0 in stage 232.0 (TID 288). 651483 bytes result sent to driver
20:33:47.159 INFO  TaskSetManager - Finished task 0.0 in stage 232.0 (TID 288) in 20 ms on localhost (executor driver) (1/1)
20:33:47.159 INFO  TaskSchedulerImpl - Removed TaskSet 232.0, whose tasks have all completed, from pool 
20:33:47.159 INFO  DAGScheduler - ResultStage 232 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.028 s
20:33:47.159 INFO  DAGScheduler - Job 174 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:47.159 INFO  TaskSchedulerImpl - Killing all running tasks in stage 232: Stage finished
20:33:47.160 INFO  DAGScheduler - Job 174 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.028879 s
20:33:47.171 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:47.171 INFO  DAGScheduler - Got job 175 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:47.171 INFO  DAGScheduler - Final stage: ResultStage 233 (count at ReadsSparkSinkUnitTest.java:185)
20:33:47.171 INFO  DAGScheduler - Parents of final stage: List()
20:33:47.171 INFO  DAGScheduler - Missing parents: List()
20:33:47.171 INFO  DAGScheduler - Submitting ResultStage 233 (MapPartitionsRDD[1097] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:47.188 INFO  MemoryStore - Block broadcast_468 stored as values in memory (estimated size 426.1 KiB, free 1917.4 MiB)
20:33:47.189 INFO  MemoryStore - Block broadcast_468_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.2 MiB)
20:33:47.189 INFO  BlockManagerInfo - Added broadcast_468_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:47.190 INFO  SparkContext - Created broadcast 468 from broadcast at DAGScheduler.scala:1580
20:33:47.190 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 233 (MapPartitionsRDD[1097] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:47.190 INFO  TaskSchedulerImpl - Adding task set 233.0 with 1 tasks resource profile 0
20:33:47.190 INFO  TaskSetManager - Starting task 0.0 in stage 233.0 (TID 289) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:47.191 INFO  Executor - Running task 0.0 in stage 233.0 (TID 289)
20:33:47.226 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:47.235 INFO  Executor - Finished task 0.0 in stage 233.0 (TID 289). 989 bytes result sent to driver
20:33:47.235 INFO  TaskSetManager - Finished task 0.0 in stage 233.0 (TID 289) in 45 ms on localhost (executor driver) (1/1)
20:33:47.235 INFO  TaskSchedulerImpl - Removed TaskSet 233.0, whose tasks have all completed, from pool 
20:33:47.235 INFO  DAGScheduler - ResultStage 233 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.064 s
20:33:47.235 INFO  DAGScheduler - Job 175 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:47.235 INFO  TaskSchedulerImpl - Killing all running tasks in stage 233: Stage finished
20:33:47.236 INFO  DAGScheduler - Job 175 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.064820 s
20:33:47.239 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:47.239 INFO  DAGScheduler - Got job 176 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:47.239 INFO  DAGScheduler - Final stage: ResultStage 234 (count at ReadsSparkSinkUnitTest.java:185)
20:33:47.239 INFO  DAGScheduler - Parents of final stage: List()
20:33:47.239 INFO  DAGScheduler - Missing parents: List()
20:33:47.239 INFO  DAGScheduler - Submitting ResultStage 234 (MapPartitionsRDD[1115] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:47.245 INFO  MemoryStore - Block broadcast_469 stored as values in memory (estimated size 148.1 KiB, free 1917.1 MiB)
20:33:47.246 INFO  MemoryStore - Block broadcast_469_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.1 MiB)
20:33:47.246 INFO  BlockManagerInfo - Added broadcast_469_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.4 MiB)
20:33:47.246 INFO  SparkContext - Created broadcast 469 from broadcast at DAGScheduler.scala:1580
20:33:47.246 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 234 (MapPartitionsRDD[1115] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:47.246 INFO  TaskSchedulerImpl - Adding task set 234.0 with 1 tasks resource profile 0
20:33:47.247 INFO  TaskSetManager - Starting task 0.0 in stage 234.0 (TID 290) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:47.247 INFO  Executor - Running task 0.0 in stage 234.0 (TID 290)
20:33:47.258 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam:0+237038
20:33:47.259 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.259 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam	dst=null	perm=null	proto=rpc
20:33:47.260 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bam.bai	dst=null	perm=null	proto=rpc
20:33:47.260 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_c39db310-712f-4451-abc4-69aebd508145.bai	dst=null	perm=null	proto=rpc
20:33:47.263 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:47.264 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:47.265 INFO  Executor - Finished task 0.0 in stage 234.0 (TID 290). 989 bytes result sent to driver
20:33:47.265 INFO  TaskSetManager - Finished task 0.0 in stage 234.0 (TID 290) in 18 ms on localhost (executor driver) (1/1)
20:33:47.265 INFO  TaskSchedulerImpl - Removed TaskSet 234.0, whose tasks have all completed, from pool 
20:33:47.266 INFO  DAGScheduler - ResultStage 234 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.027 s
20:33:47.266 INFO  DAGScheduler - Job 176 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:47.266 INFO  TaskSchedulerImpl - Killing all running tasks in stage 234: Stage finished
20:33:47.266 INFO  DAGScheduler - Job 176 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.026825 s
20:33:47.274 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:47.275 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:47.275 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:47.276 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:47.278 INFO  MemoryStore - Block broadcast_470 stored as values in memory (estimated size 297.9 KiB, free 1916.8 MiB)
20:33:47.284 INFO  MemoryStore - Block broadcast_470_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.7 MiB)
20:33:47.284 INFO  BlockManagerInfo - Added broadcast_470_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:47.285 INFO  SparkContext - Created broadcast 470 from newAPIHadoopFile at PathSplitSource.java:96
20:33:47.306 INFO  MemoryStore - Block broadcast_471 stored as values in memory (estimated size 297.9 KiB, free 1916.4 MiB)
20:33:47.312 INFO  MemoryStore - Block broadcast_471_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.4 MiB)
20:33:47.312 INFO  BlockManagerInfo - Added broadcast_471_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:47.312 INFO  SparkContext - Created broadcast 471 from newAPIHadoopFile at PathSplitSource.java:96
20:33:47.332 INFO  FileInputFormat - Total input files to process : 1
20:33:47.333 INFO  MemoryStore - Block broadcast_472 stored as values in memory (estimated size 160.7 KiB, free 1916.2 MiB)
20:33:47.334 INFO  MemoryStore - Block broadcast_472_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.2 MiB)
20:33:47.334 INFO  BlockManagerInfo - Added broadcast_472_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:47.334 INFO  SparkContext - Created broadcast 472 from broadcast at ReadsSparkSink.java:133
20:33:47.335 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:47.335 WARN  HtsjdkReadsRddStorage - Unrecognized write option: DISABLE
20:33:47.336 INFO  MemoryStore - Block broadcast_473 stored as values in memory (estimated size 163.2 KiB, free 1916.0 MiB)
20:33:47.336 INFO  MemoryStore - Block broadcast_473_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1916.0 MiB)
20:33:47.336 INFO  BlockManagerInfo - Added broadcast_473_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.3 MiB)
20:33:47.336 INFO  SparkContext - Created broadcast 473 from broadcast at BamSink.java:76
20:33:47.338 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts	dst=null	perm=null	proto=rpc
20:33:47.339 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:47.339 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:47.339 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:47.339 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:47.345 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:47.346 INFO  DAGScheduler - Registering RDD 1129 (mapToPair at SparkUtils.java:161) as input to shuffle 47
20:33:47.346 INFO  DAGScheduler - Got job 177 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:47.346 INFO  DAGScheduler - Final stage: ResultStage 236 (runJob at SparkHadoopWriter.scala:83)
20:33:47.346 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 235)
20:33:47.346 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 235)
20:33:47.346 INFO  DAGScheduler - Submitting ShuffleMapStage 235 (MapPartitionsRDD[1129] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:47.363 INFO  MemoryStore - Block broadcast_474 stored as values in memory (estimated size 520.4 KiB, free 1915.5 MiB)
20:33:47.367 INFO  BlockManagerInfo - Removed broadcast_464_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.3 MiB)
20:33:47.368 INFO  BlockManagerInfo - Removed broadcast_465_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.3 MiB)
20:33:47.368 INFO  BlockManagerInfo - Removed broadcast_463_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:47.368 INFO  MemoryStore - Block broadcast_474_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1916.3 MiB)
20:33:47.368 INFO  BlockManagerInfo - Added broadcast_474_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.3 MiB)
20:33:47.369 INFO  SparkContext - Created broadcast 474 from broadcast at DAGScheduler.scala:1580
20:33:47.369 INFO  BlockManagerInfo - Removed broadcast_469_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.4 MiB)
20:33:47.369 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 235 (MapPartitionsRDD[1129] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:47.369 INFO  TaskSchedulerImpl - Adding task set 235.0 with 1 tasks resource profile 0
20:33:47.369 INFO  BlockManagerInfo - Removed broadcast_467_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.5 MiB)
20:33:47.370 INFO  TaskSetManager - Starting task 0.0 in stage 235.0 (TID 291) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:47.370 INFO  BlockManagerInfo - Removed broadcast_466_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:47.370 INFO  Executor - Running task 0.0 in stage 235.0 (TID 291)
20:33:47.371 INFO  BlockManagerInfo - Removed broadcast_468_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:47.371 INFO  BlockManagerInfo - Removed broadcast_459_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.7 MiB)
20:33:47.372 INFO  BlockManagerInfo - Removed broadcast_462_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:47.373 INFO  BlockManagerInfo - Removed broadcast_471_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:47.374 INFO  BlockManagerInfo - Removed broadcast_461_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:47.402 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:47.416 INFO  Executor - Finished task 0.0 in stage 235.0 (TID 291). 1148 bytes result sent to driver
20:33:47.417 INFO  TaskSetManager - Finished task 0.0 in stage 235.0 (TID 291) in 48 ms on localhost (executor driver) (1/1)
20:33:47.417 INFO  TaskSchedulerImpl - Removed TaskSet 235.0, whose tasks have all completed, from pool 
20:33:47.417 INFO  DAGScheduler - ShuffleMapStage 235 (mapToPair at SparkUtils.java:161) finished in 0.071 s
20:33:47.417 INFO  DAGScheduler - looking for newly runnable stages
20:33:47.417 INFO  DAGScheduler - running: HashSet()
20:33:47.417 INFO  DAGScheduler - waiting: HashSet(ResultStage 236)
20:33:47.417 INFO  DAGScheduler - failed: HashSet()
20:33:47.417 INFO  DAGScheduler - Submitting ResultStage 236 (MapPartitionsRDD[1134] at mapToPair at BamSink.java:91), which has no missing parents
20:33:47.424 INFO  MemoryStore - Block broadcast_475 stored as values in memory (estimated size 241.5 KiB, free 1918.4 MiB)
20:33:47.425 INFO  MemoryStore - Block broadcast_475_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1918.4 MiB)
20:33:47.425 INFO  BlockManagerInfo - Added broadcast_475_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.7 MiB)
20:33:47.425 INFO  SparkContext - Created broadcast 475 from broadcast at DAGScheduler.scala:1580
20:33:47.425 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 236 (MapPartitionsRDD[1134] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:47.425 INFO  TaskSchedulerImpl - Adding task set 236.0 with 1 tasks resource profile 0
20:33:47.426 INFO  TaskSetManager - Starting task 0.0 in stage 236.0 (TID 292) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:47.426 INFO  Executor - Running task 0.0 in stage 236.0 (TID 292)
20:33:47.430 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:47.430 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:47.441 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:47.441 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:47.441 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:47.441 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:47.441 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:47.441 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:47.442 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:47.445 INFO  StateChange - BLOCK* allocate blk_1073741895_1071, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0/part-r-00000
20:33:47.446 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741895_1071 src: /127.0.0.1:53184 dest: /127.0.0.1:35765
20:33:47.448 INFO  clienttrace - src: /127.0.0.1:53184, dest: /127.0.0.1:35765, bytes: 231298, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741895_1071, duration(ns): 1301313
20:33:47.448 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741895_1071, type=LAST_IN_PIPELINE terminating
20:33:47.449 INFO  FSNamesystem - BLOCK* blk_1073741895_1071 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0/part-r-00000
20:33:47.850 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:47.850 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:47.851 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0	dst=null	perm=null	proto=rpc
20:33:47.852 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0	dst=null	perm=null	proto=rpc
20:33:47.852 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/task_202507152033478048570745994556458_1134_r_000000	dst=null	perm=null	proto=rpc
20:33:47.853 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/_temporary/attempt_202507152033478048570745994556458_1134_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/task_202507152033478048570745994556458_1134_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:47.853 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033478048570745994556458_1134_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/task_202507152033478048570745994556458_1134_r_000000
20:33:47.853 INFO  SparkHadoopMapRedUtil - attempt_202507152033478048570745994556458_1134_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:47.853 INFO  Executor - Finished task 0.0 in stage 236.0 (TID 292). 1858 bytes result sent to driver
20:33:47.854 INFO  TaskSetManager - Finished task 0.0 in stage 236.0 (TID 292) in 428 ms on localhost (executor driver) (1/1)
20:33:47.854 INFO  TaskSchedulerImpl - Removed TaskSet 236.0, whose tasks have all completed, from pool 
20:33:47.854 INFO  DAGScheduler - ResultStage 236 (runJob at SparkHadoopWriter.scala:83) finished in 0.436 s
20:33:47.854 INFO  DAGScheduler - Job 177 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:47.854 INFO  TaskSchedulerImpl - Killing all running tasks in stage 236: Stage finished
20:33:47.854 INFO  DAGScheduler - Job 177 finished: runJob at SparkHadoopWriter.scala:83, took 0.508748 s
20:33:47.854 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033478048570745994556458_1134.
20:33:47.855 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:47.855 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts	dst=null	perm=null	proto=rpc
20:33:47.855 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/task_202507152033478048570745994556458_1134_r_000000	dst=null	perm=null	proto=rpc
20:33:47.856 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:47.856 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary/0/task_202507152033478048570745994556458_1134_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:47.857 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:47.857 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:47.858 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:47.859 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/.spark-staging-1134	dst=null	perm=null	proto=rpc
20:33:47.859 INFO  SparkHadoopWriter - Write Job job_202507152033478048570745994556458_1134 committed. Elapsed time: 4 ms.
20:33:47.859 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:47.860 INFO  StateChange - BLOCK* allocate blk_1073741896_1072, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/header
20:33:47.861 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741896_1072 src: /127.0.0.1:53186 dest: /127.0.0.1:35765
20:33:47.862 INFO  clienttrace - src: /127.0.0.1:53186, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741896_1072, duration(ns): 473399
20:33:47.862 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741896_1072, type=LAST_IN_PIPELINE terminating
20:33:47.863 INFO  FSNamesystem - BLOCK* blk_1073741896_1072 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/header
20:33:48.264 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:48.265 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:48.266 INFO  StateChange - BLOCK* allocate blk_1073741897_1073, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/terminator
20:33:48.266 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741897_1073 src: /127.0.0.1:53194 dest: /127.0.0.1:35765
20:33:48.268 INFO  clienttrace - src: /127.0.0.1:53194, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741897_1073, duration(ns): 509277
20:33:48.268 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741897_1073, type=LAST_IN_PIPELINE terminating
20:33:48.268 INFO  FSNamesystem - BLOCK* blk_1073741897_1073 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/terminator
20:33:48.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741891_1067 replica FinalizedReplica, blk_1073741891_1067, FINALIZED (getNumBytes()=212, getBytesOnDisk()=212, getVisibleLength()=212, getVolume()=/tmp/minicluster_storage11240959748026123074/data/data1, getBlockURI()=file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741891) for deletion
20:33:48.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741886_1062 replica FinalizedReplica, blk_1073741886_1062, FINALIZED (getNumBytes()=5472, getBytesOnDisk()=5472, getVisibleLength()=5472, getVolume()=/tmp/minicluster_storage11240959748026123074/data/data2, getBlockURI()=file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741886) for deletion
20:33:48.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741891_1067 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741891
20:33:48.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741886_1062 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741886
20:33:48.669 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:48.670 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts	dst=null	perm=null	proto=rpc
20:33:48.671 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:48.671 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:48.671 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam
20:33:48.672 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:48.672 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.673 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.673 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:48.673 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam done
20:33:48.674 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.674 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.parts	dst=null	perm=null	proto=rpc
20:33:48.675 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.675 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.675 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.676 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.676 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.bai	dst=null	perm=null	proto=rpc
20:33:48.677 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bai	dst=null	perm=null	proto=rpc
20:33:48.679 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.680 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.680 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.sbi	dst=null	perm=null	proto=rpc
20:33:48.682 INFO  MemoryStore - Block broadcast_476 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:48.689 INFO  MemoryStore - Block broadcast_476_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:48.689 INFO  BlockManagerInfo - Added broadcast_476_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:48.690 INFO  SparkContext - Created broadcast 476 from newAPIHadoopFile at PathSplitSource.java:96
20:33:48.710 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.710 INFO  FileInputFormat - Total input files to process : 1
20:33:48.710 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.745 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:48.746 INFO  DAGScheduler - Got job 178 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:48.746 INFO  DAGScheduler - Final stage: ResultStage 237 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:48.746 INFO  DAGScheduler - Parents of final stage: List()
20:33:48.746 INFO  DAGScheduler - Missing parents: List()
20:33:48.746 INFO  DAGScheduler - Submitting ResultStage 237 (MapPartitionsRDD[1141] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:48.762 INFO  MemoryStore - Block broadcast_477 stored as values in memory (estimated size 426.2 KiB, free 1917.6 MiB)
20:33:48.763 INFO  MemoryStore - Block broadcast_477_piece0 stored as bytes in memory (estimated size 153.7 KiB, free 1917.4 MiB)
20:33:48.764 INFO  BlockManagerInfo - Added broadcast_477_piece0 in memory on localhost:45281 (size: 153.7 KiB, free: 1919.5 MiB)
20:33:48.764 INFO  SparkContext - Created broadcast 477 from broadcast at DAGScheduler.scala:1580
20:33:48.764 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 237 (MapPartitionsRDD[1141] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:48.764 INFO  TaskSchedulerImpl - Adding task set 237.0 with 1 tasks resource profile 0
20:33:48.764 INFO  TaskSetManager - Starting task 0.0 in stage 237.0 (TID 293) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:48.765 INFO  Executor - Running task 0.0 in stage 237.0 (TID 293)
20:33:48.795 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam:0+237038
20:33:48.796 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.796 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.798 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.798 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.799 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.bai	dst=null	perm=null	proto=rpc
20:33:48.799 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bai	dst=null	perm=null	proto=rpc
20:33:48.800 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.802 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.802 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.803 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.804 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.807 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.808 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.809 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.810 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.811 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.811 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.812 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.813 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.813 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.814 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.815 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.816 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.816 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.817 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.818 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.819 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.820 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.821 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.821 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.822 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.823 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.824 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.825 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.826 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.826 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.827 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.828 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.829 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.830 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.831 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.833 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.834 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.835 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.835 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.836 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.837 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.838 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.838 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.839 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.841 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.841 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.842 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.844 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.845 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.845 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.846 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.847 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.847 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.848 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.849 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.850 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.851 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:48.851 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.852 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.852 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.853 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.bai	dst=null	perm=null	proto=rpc
20:33:48.853 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bai	dst=null	perm=null	proto=rpc
20:33:48.855 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.857 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:48.858 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:48.861 INFO  Executor - Finished task 0.0 in stage 237.0 (TID 293). 651526 bytes result sent to driver
20:33:48.862 INFO  TaskSetManager - Finished task 0.0 in stage 237.0 (TID 293) in 98 ms on localhost (executor driver) (1/1)
20:33:48.862 INFO  TaskSchedulerImpl - Removed TaskSet 237.0, whose tasks have all completed, from pool 
20:33:48.862 INFO  DAGScheduler - ResultStage 237 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.116 s
20:33:48.863 INFO  DAGScheduler - Job 178 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:48.863 INFO  TaskSchedulerImpl - Killing all running tasks in stage 237: Stage finished
20:33:48.863 INFO  DAGScheduler - Job 178 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.117305 s
20:33:48.872 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:48.873 INFO  DAGScheduler - Got job 179 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:48.873 INFO  DAGScheduler - Final stage: ResultStage 238 (count at ReadsSparkSinkUnitTest.java:185)
20:33:48.873 INFO  DAGScheduler - Parents of final stage: List()
20:33:48.873 INFO  DAGScheduler - Missing parents: List()
20:33:48.873 INFO  DAGScheduler - Submitting ResultStage 238 (MapPartitionsRDD[1122] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:48.890 INFO  MemoryStore - Block broadcast_478 stored as values in memory (estimated size 426.1 KiB, free 1917.0 MiB)
20:33:48.891 INFO  MemoryStore - Block broadcast_478_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.9 MiB)
20:33:48.892 INFO  BlockManagerInfo - Added broadcast_478_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.4 MiB)
20:33:48.892 INFO  SparkContext - Created broadcast 478 from broadcast at DAGScheduler.scala:1580
20:33:48.892 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 238 (MapPartitionsRDD[1122] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:48.892 INFO  TaskSchedulerImpl - Adding task set 238.0 with 1 tasks resource profile 0
20:33:48.892 INFO  TaskSetManager - Starting task 0.0 in stage 238.0 (TID 294) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:48.893 INFO  Executor - Running task 0.0 in stage 238.0 (TID 294)
20:33:48.926 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:48.935 INFO  Executor - Finished task 0.0 in stage 238.0 (TID 294). 989 bytes result sent to driver
20:33:48.936 INFO  TaskSetManager - Finished task 0.0 in stage 238.0 (TID 294) in 44 ms on localhost (executor driver) (1/1)
20:33:48.936 INFO  TaskSchedulerImpl - Removed TaskSet 238.0, whose tasks have all completed, from pool 
20:33:48.936 INFO  DAGScheduler - ResultStage 238 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.063 s
20:33:48.936 INFO  DAGScheduler - Job 179 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:48.936 INFO  TaskSchedulerImpl - Killing all running tasks in stage 238: Stage finished
20:33:48.936 INFO  DAGScheduler - Job 179 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.063517 s
20:33:48.939 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:48.940 INFO  DAGScheduler - Got job 180 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:48.940 INFO  DAGScheduler - Final stage: ResultStage 239 (count at ReadsSparkSinkUnitTest.java:185)
20:33:48.940 INFO  DAGScheduler - Parents of final stage: List()
20:33:48.940 INFO  DAGScheduler - Missing parents: List()
20:33:48.940 INFO  DAGScheduler - Submitting ResultStage 239 (MapPartitionsRDD[1141] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:48.956 INFO  MemoryStore - Block broadcast_479 stored as values in memory (estimated size 426.1 KiB, free 1916.5 MiB)
20:33:48.957 INFO  MemoryStore - Block broadcast_479_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.3 MiB)
20:33:48.958 INFO  BlockManagerInfo - Added broadcast_479_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.2 MiB)
20:33:48.958 INFO  SparkContext - Created broadcast 479 from broadcast at DAGScheduler.scala:1580
20:33:48.958 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 239 (MapPartitionsRDD[1141] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:48.958 INFO  TaskSchedulerImpl - Adding task set 239.0 with 1 tasks resource profile 0
20:33:48.958 INFO  TaskSetManager - Starting task 0.0 in stage 239.0 (TID 295) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:48.959 INFO  Executor - Running task 0.0 in stage 239.0 (TID 295)
20:33:48.988 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam:0+237038
20:33:48.989 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.989 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.990 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.990 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.991 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.991 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.bai	dst=null	perm=null	proto=rpc
20:33:48.992 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bai	dst=null	perm=null	proto=rpc
20:33:48.993 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:48.994 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.995 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:48.996 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:49.001 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.002 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.002 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.003 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.003 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.004 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.005 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.006 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.007 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.007 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.008 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.009 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.009 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.010 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.011 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.011 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.012 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.013 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.014 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.014 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.015 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.016 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.017 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.018 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.018 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.019 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.020 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.021 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.022 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.022 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.023 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.024 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.025 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.026 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.026 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.027 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.027 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.028 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.029 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.030 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.030 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.031 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.032 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.033 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.034 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.034 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.035 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.036 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.037 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.037 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.038 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.039 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.039 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.040 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.041 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.041 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.042 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.043 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.043 WARN  DFSUtil - Unexpected value for data transfer bytes=1632 duration=0
20:33:49.044 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.045 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.045 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.046 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:49.046 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam	dst=null	perm=null	proto=rpc
20:33:49.047 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.047 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest1_bc1600dc-bfd9-41e0-bdb8-0a5a5e2e5b0d.bai	dst=null	perm=null	proto=rpc
20:33:49.049 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:49.052 WARN  DFSUtil - Unexpected value for data transfer bytes=233106 duration=0
20:33:49.052 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:49.054 INFO  Executor - Finished task 0.0 in stage 239.0 (TID 295). 989 bytes result sent to driver
20:33:49.054 INFO  TaskSetManager - Finished task 0.0 in stage 239.0 (TID 295) in 96 ms on localhost (executor driver) (1/1)
20:33:49.054 INFO  TaskSchedulerImpl - Removed TaskSet 239.0, whose tasks have all completed, from pool 
20:33:49.054 INFO  DAGScheduler - ResultStage 239 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.114 s
20:33:49.054 INFO  DAGScheduler - Job 180 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:49.054 INFO  TaskSchedulerImpl - Killing all running tasks in stage 239: Stage finished
20:33:49.055 INFO  DAGScheduler - Job 180 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.115214 s
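Jobs 178-180 above are the verification half of the test: a collect of the reads round-tripped through HDFS plus one count per RDD, and each Spark action schedules its own job and ResultStage, which is why three separate DAGScheduler jobs appear back to back. The following is a minimal sketch of that action pattern in plain Spark Java; the local[1] master and the parallelize data are hypothetical stand-ins for the reads RDDs that ReadsSparkSource builds in the real test.

import java.util.Arrays;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public final class CollectCountSketch {
    public static void main(String[] args) {
        // local[1] mirrors the single "executor driver" seen in the log above.
        try (JavaSparkContext ctx = new JavaSparkContext("local[1]", "collectCountSketch")) {
            // Hypothetical stand-in for the reads loaded back from HDFS.
            JavaRDD<String> reads = ctx.parallelize(Arrays.asList("read1", "read2", "read3"));
            // Each action triggers its own DAGScheduler job and ResultStage,
            // like "collect at ...:182" and "count at ...:185" above.
            List<String> all = reads.collect();
            long n = reads.count();
            System.out.println("collected " + all.size() + ", counted " + n);
        }
    }
}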
20:33:49.066 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.067 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.067 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.068 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.070 INFO  MemoryStore - Block broadcast_480 stored as values in memory (estimated size 298.0 KiB, free 1916.0 MiB)
20:33:49.076 INFO  MemoryStore - Block broadcast_480_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1916.0 MiB)
20:33:49.076 INFO  BlockManagerInfo - Added broadcast_480_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.2 MiB)
20:33:49.077 INFO  SparkContext - Created broadcast 480 from newAPIHadoopFile at PathSplitSource.java:96
20:33:49.097 INFO  MemoryStore - Block broadcast_481 stored as values in memory (estimated size 298.0 KiB, free 1915.7 MiB)
20:33:49.103 INFO  MemoryStore - Block broadcast_481_piece0 stored as bytes in memory (estimated size 50.3 KiB, free 1915.6 MiB)
20:33:49.104 INFO  BlockManagerInfo - Added broadcast_481_piece0 in memory on localhost:45281 (size: 50.3 KiB, free: 1919.1 MiB)
20:33:49.104 INFO  SparkContext - Created broadcast 481 from newAPIHadoopFile at PathSplitSource.java:96
20:33:49.124 INFO  FileInputFormat - Total input files to process : 1
20:33:49.125 INFO  MemoryStore - Block broadcast_482 stored as values in memory (estimated size 160.7 KiB, free 1915.5 MiB)
20:33:49.126 INFO  MemoryStore - Block broadcast_482_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.5 MiB)
20:33:49.126 INFO  BlockManagerInfo - Added broadcast_482_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:49.126 INFO  SparkContext - Created broadcast 482 from broadcast at ReadsSparkSink.java:133
20:33:49.127 INFO  MemoryStore - Block broadcast_483 stored as values in memory (estimated size 163.2 KiB, free 1915.3 MiB)
20:33:49.128 INFO  MemoryStore - Block broadcast_483_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1915.3 MiB)
20:33:49.128 INFO  BlockManagerInfo - Added broadcast_483_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.1 MiB)
20:33:49.128 INFO  SparkContext - Created broadcast 483 from broadcast at BamSink.java:76
20:33:49.130 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.130 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:49.130 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:49.130 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:49.131 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
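The .bam.parts directory above is written through Hadoop's FileOutputCommitter with algorithm version 1: each task writes under _temporary/0/_temporary/attempt_*, task commit renames that to _temporary/0/task_*, and job commit promotes the task directories into the output directory (the rename audit entries further down show exactly this). The snippet below is a hedged illustration of the configuration keys behind the three committer log lines, not the test's own setup code.

import org.apache.hadoop.conf.Configuration;

public final class CommitterConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "File Output Committer Algorithm version is 1": v1 performs the two-step
        // task-then-job rename; v2 renames task output straight to the destination.
        conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
        // "skip cleanup _temporary folders under output directory:false,
        //  ignore cleanup failures: false"
        conf.setBoolean("mapreduce.fileoutputcommitter.cleanup.skipped", false);
        conf.setBoolean("mapreduce.fileoutputcommitter.cleanup-failures.ignored", false);
        System.out.println("committer algorithm version = "
                + conf.get("mapreduce.fileoutputcommitter.algorithm.version"));
    }
}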
20:33:49.137 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:49.137 INFO  DAGScheduler - Registering RDD 1155 (mapToPair at SparkUtils.java:161) as input to shuffle 48
20:33:49.137 INFO  DAGScheduler - Got job 181 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:49.138 INFO  DAGScheduler - Final stage: ResultStage 241 (runJob at SparkHadoopWriter.scala:83)
20:33:49.138 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 240)
20:33:49.138 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 240)
20:33:49.138 INFO  DAGScheduler - Submitting ShuffleMapStage 240 (MapPartitionsRDD[1155] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:49.155 INFO  MemoryStore - Block broadcast_484 stored as values in memory (estimated size 520.4 KiB, free 1914.8 MiB)
20:33:49.159 INFO  BlockManagerInfo - Removed broadcast_474_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.3 MiB)
20:33:49.160 INFO  BlockManagerInfo - Removed broadcast_470_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.3 MiB)
20:33:49.160 INFO  MemoryStore - Block broadcast_484_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1915.6 MiB)
20:33:49.160 INFO  BlockManagerInfo - Removed broadcast_481_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.3 MiB)
20:33:49.160 INFO  BlockManagerInfo - Added broadcast_484_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.2 MiB)
20:33:49.161 INFO  SparkContext - Created broadcast 484 from broadcast at DAGScheduler.scala:1580
20:33:49.161 INFO  BlockManagerInfo - Removed broadcast_473_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.2 MiB)
20:33:49.161 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 240 (MapPartitionsRDD[1155] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:49.161 INFO  TaskSchedulerImpl - Adding task set 240.0 with 1 tasks resource profile 0
20:33:49.161 INFO  BlockManagerInfo - Removed broadcast_479_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.3 MiB)
20:33:49.161 INFO  BlockManagerInfo - Removed broadcast_475_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.4 MiB)
20:33:49.161 INFO  TaskSetManager - Starting task 0.0 in stage 240.0 (TID 296) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
20:33:49.162 INFO  Executor - Running task 0.0 in stage 240.0 (TID 296)
20:33:49.162 INFO  BlockManagerInfo - Removed broadcast_478_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.6 MiB)
20:33:49.162 INFO  BlockManagerInfo - Removed broadcast_477_piece0 on localhost:45281 in memory (size: 153.7 KiB, free: 1919.7 MiB)
20:33:49.163 INFO  BlockManagerInfo - Removed broadcast_472_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:49.163 INFO  BlockManagerInfo - Removed broadcast_476_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:49.192 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:49.208 INFO  Executor - Finished task 0.0 in stage 240.0 (TID 296). 1148 bytes result sent to driver
20:33:49.208 INFO  TaskSetManager - Finished task 0.0 in stage 240.0 (TID 296) in 47 ms on localhost (executor driver) (1/1)
20:33:49.208 INFO  TaskSchedulerImpl - Removed TaskSet 240.0, whose tasks have all completed, from pool 
20:33:49.208 INFO  DAGScheduler - ShuffleMapStage 240 (mapToPair at SparkUtils.java:161) finished in 0.070 s
20:33:49.208 INFO  DAGScheduler - looking for newly runnable stages
20:33:49.208 INFO  DAGScheduler - running: HashSet()
20:33:49.208 INFO  DAGScheduler - waiting: HashSet(ResultStage 241)
20:33:49.208 INFO  DAGScheduler - failed: HashSet()
20:33:49.208 INFO  DAGScheduler - Submitting ResultStage 241 (MapPartitionsRDD[1160] at mapToPair at BamSink.java:91), which has no missing parents
20:33:49.218 INFO  MemoryStore - Block broadcast_485 stored as values in memory (estimated size 241.5 KiB, free 1918.4 MiB)
20:33:49.218 INFO  MemoryStore - Block broadcast_485_piece0 stored as bytes in memory (estimated size 67.1 KiB, free 1918.4 MiB)
20:33:49.218 INFO  BlockManagerInfo - Added broadcast_485_piece0 in memory on localhost:45281 (size: 67.1 KiB, free: 1919.7 MiB)
20:33:49.219 INFO  SparkContext - Created broadcast 485 from broadcast at DAGScheduler.scala:1580
20:33:49.219 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 241 (MapPartitionsRDD[1160] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:49.219 INFO  TaskSchedulerImpl - Adding task set 241.0 with 1 tasks resource profile 0
20:33:49.219 INFO  TaskSetManager - Starting task 0.0 in stage 241.0 (TID 297) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:49.220 INFO  Executor - Running task 0.0 in stage 241.0 (TID 297)
20:33:49.223 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:49.224 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:49.234 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:49.234 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:49.234 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:49.234 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:49.234 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:49.234 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:49.235 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.236 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.237 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.239 INFO  StateChange - BLOCK* allocate blk_1073741898_1074, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/part-r-00000
20:33:49.240 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741898_1074 src: /127.0.0.1:53910 dest: /127.0.0.1:35765
20:33:49.242 INFO  clienttrace - src: /127.0.0.1:53910, dest: /127.0.0.1:35765, bytes: 229774, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741898_1074, duration(ns): 1172297
20:33:49.242 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741898_1074, type=LAST_IN_PIPELINE terminating
20:33:49.243 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.243 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:49.244 INFO  StateChange - BLOCK* allocate blk_1073741899_1075, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.sbi
20:33:49.245 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741899_1075 src: /127.0.0.1:53912 dest: /127.0.0.1:35765
20:33:49.245 INFO  clienttrace - src: /127.0.0.1:53912, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741899_1075, duration(ns): 324745
20:33:49.245 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741899_1075, type=LAST_IN_PIPELINE terminating
20:33:49.246 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.248 INFO  StateChange - BLOCK* allocate blk_1073741900_1076, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.bai
20:33:49.248 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741900_1076 src: /127.0.0.1:53928 dest: /127.0.0.1:35765
20:33:49.249 INFO  clienttrace - src: /127.0.0.1:53928, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741900_1076, duration(ns): 390468
20:33:49.250 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741900_1076, type=LAST_IN_PIPELINE terminating
20:33:49.250 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.251 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0	dst=null	perm=null	proto=rpc
20:33:49.251 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0	dst=null	perm=null	proto=rpc
20:33:49.252 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000	dst=null	perm=null	proto=rpc
20:33:49.252 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/_temporary/attempt_202507152033495708424505184325732_1160_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:49.252 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033495708424505184325732_1160_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000
20:33:49.252 INFO  SparkHadoopMapRedUtil - attempt_202507152033495708424505184325732_1160_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:49.253 INFO  Executor - Finished task 0.0 in stage 241.0 (TID 297). 1858 bytes result sent to driver
20:33:49.253 INFO  TaskSetManager - Finished task 0.0 in stage 241.0 (TID 297) in 34 ms on localhost (executor driver) (1/1)
20:33:49.253 INFO  TaskSchedulerImpl - Removed TaskSet 241.0, whose tasks have all completed, from pool 
20:33:49.254 INFO  DAGScheduler - ResultStage 241 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
20:33:49.254 INFO  DAGScheduler - Job 181 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:49.254 INFO  TaskSchedulerImpl - Killing all running tasks in stage 241: Stage finished
20:33:49.254 INFO  DAGScheduler - Job 181 finished: runJob at SparkHadoopWriter.scala:83, took 0.116798 s
20:33:49.254 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033495708424505184325732_1160.
20:33:49.254 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:49.255 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.255 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000	dst=null	perm=null	proto=rpc
20:33:49.256 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:49.256 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.257 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:49.257 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.257 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:49.258 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary/0/task_202507152033495708424505184325732_1160_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.258 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:49.259 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.259 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.260 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.spark-staging-1160	dst=null	perm=null	proto=rpc
20:33:49.260 INFO  SparkHadoopWriter - Write Job job_202507152033495708424505184325732_1160 committed. Elapsed time: 6 ms.
20:33:49.261 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.262 INFO  StateChange - BLOCK* allocate blk_1073741901_1077, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/header
20:33:49.263 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741901_1077 src: /127.0.0.1:53938 dest: /127.0.0.1:35765
20:33:49.264 INFO  clienttrace - src: /127.0.0.1:53938, dest: /127.0.0.1:35765, bytes: 5712, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741901_1077, duration(ns): 475187
20:33:49.264 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741901_1077, type=LAST_IN_PIPELINE terminating
20:33:49.265 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.265 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.266 INFO  StateChange - BLOCK* allocate blk_1073741902_1078, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/terminator
20:33:49.267 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741902_1078 src: /127.0.0.1:53946 dest: /127.0.0.1:35765
20:33:49.268 INFO  clienttrace - src: /127.0.0.1:53946, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741902_1078, duration(ns): 344020
20:33:49.268 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741902_1078, type=LAST_IN_PIPELINE terminating
20:33:49.268 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.269 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.269 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.270 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.270 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam
20:33:49.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.271 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.272 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.272 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam done
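After the job commit, the sink stitches header + part-r-00000 + terminator into a single BAM with HDFS concat and renames the result into place, which is what the concat and rename audit entries above record. A minimal sketch of that stitching pattern with the raw Hadoop FileSystem API follows; the namenode address and paths are hypothetical, and the real HadoopFileSystemWrapper goes through an intermediate "output" file and handles the preconditions HDFS places on concat (same directory, non-empty sources).

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class ConcatPartsSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical cluster and output name; the test uses a UUID-suffixed path.
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020/"), new Configuration());
        Path parts = new Path("/user/runner/example.bam.parts");
        // concat() moves the source blocks onto the target without copying bytes,
        // so the header file grows into the full BAM.
        fs.concat(new Path(parts, "header"), new Path[] {
                new Path(parts, "part-r-00000"),
                new Path(parts, "terminator")});
        // The stitched file then replaces the final BAM, as in the rename audit entry.
        fs.rename(new Path(parts, "header"), new Path("/user/runner/example.bam"));
        fs.close();
    }
}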
20:33:49.272 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.272 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi
20:33:49.273 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.273 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.274 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:49.275 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:49.275 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:49.276 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:49.276 INFO  StateChange - BLOCK* allocate blk_1073741903_1079, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi
20:33:49.277 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741903_1079 src: /127.0.0.1:53962 dest: /127.0.0.1:35765
20:33:49.278 INFO  clienttrace - src: /127.0.0.1:53962, dest: /127.0.0.1:35765, bytes: 212, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741903_1079, duration(ns): 372497
20:33:49.278 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741903_1079, type=LAST_IN_PIPELINE terminating
20:33:49.278 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.279 INFO  IndexFileMerger - Done merging .sbi files
20:33:49.279 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai
20:33:49.279 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.280 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.280 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:49.281 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:49.281 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.282 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:49.283 INFO  StateChange - BLOCK* allocate blk_1073741904_1080, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai
20:33:49.283 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741904_1080 src: /127.0.0.1:53968 dest: /127.0.0.1:35765
20:33:49.284 INFO  clienttrace - src: /127.0.0.1:53968, dest: /127.0.0.1:35765, bytes: 5472, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741904_1080, duration(ns): 375316
20:33:49.284 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741904_1080, type=LAST_IN_PIPELINE terminating
20:33:49.285 INFO  FSNamesystem - BLOCK* blk_1073741904_1080 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai
20:33:49.685 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.686 INFO  IndexFileMerger - Done merging .bai files
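With the BAM assembled, the per-part .part-r-00000.sbi and .part-r-00000.bai files are merged by IndexFileMerger into the .bam.sbi and .bam.bai sidecars next to the final BAM. The byte-level index merge itself happens inside IndexFileMerger; the sketch below only shows the surrounding check that the merged sidecars exist, using hypothetical paths in place of the UUID-suffixed test outputs.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class SidecarCheckSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical cluster address and file names.
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020/"), new Configuration());
        for (String name : new String[] {"example.bam", "example.bam.bai", "example.bam.sbi"}) {
            Path p = new Path("/user/runner/" + name);
            // After merging, the BAM and both merged indexes should all be present.
            System.out.println(p + " exists: " + fs.exists(p));
        }
        fs.close();
    }
}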
20:33:49.687 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.695 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.703 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.703 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.704 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.705 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.705 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.706 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.706 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.707 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.707 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.707 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.709 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:49.710 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.711 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.711 WARN  DFSUtil - Unexpected value for data transfer bytes=231570 duration=0
20:33:49.711 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.711 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.712 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.sbi	dst=null	perm=null	proto=rpc
20:33:49.712 WARN  DFSUtil - Unexpected value for data transfer bytes=216 duration=0
20:33:49.713 INFO  MemoryStore - Block broadcast_486 stored as values in memory (estimated size 320.0 B, free 1918.4 MiB)
20:33:49.713 INFO  MemoryStore - Block broadcast_486_piece0 stored as bytes in memory (estimated size 233.0 B, free 1918.4 MiB)
20:33:49.713 INFO  BlockManagerInfo - Added broadcast_486_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.7 MiB)
20:33:49.714 INFO  SparkContext - Created broadcast 486 from broadcast at BamSource.java:104
20:33:49.714 INFO  MemoryStore - Block broadcast_487 stored as values in memory (estimated size 297.9 KiB, free 1918.1 MiB)
20:33:49.720 INFO  MemoryStore - Block broadcast_487_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.0 MiB)
20:33:49.720 INFO  BlockManagerInfo - Added broadcast_487_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:49.721 INFO  SparkContext - Created broadcast 487 from newAPIHadoopFile at PathSplitSource.java:96
20:33:49.729 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.729 INFO  FileInputFormat - Total input files to process : 1
20:33:49.730 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.744 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:49.744 INFO  DAGScheduler - Got job 182 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:49.744 INFO  DAGScheduler - Final stage: ResultStage 242 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:49.744 INFO  DAGScheduler - Parents of final stage: List()
20:33:49.744 INFO  DAGScheduler - Missing parents: List()
20:33:49.744 INFO  DAGScheduler - Submitting ResultStage 242 (MapPartitionsRDD[1166] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:49.750 INFO  MemoryStore - Block broadcast_488 stored as values in memory (estimated size 148.2 KiB, free 1917.9 MiB)
20:33:49.751 INFO  MemoryStore - Block broadcast_488_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.8 MiB)
20:33:49.751 INFO  BlockManagerInfo - Added broadcast_488_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:49.751 INFO  SparkContext - Created broadcast 488 from broadcast at DAGScheduler.scala:1580
20:33:49.751 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 242 (MapPartitionsRDD[1166] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:49.752 INFO  TaskSchedulerImpl - Adding task set 242.0 with 1 tasks resource profile 0
20:33:49.752 INFO  TaskSetManager - Starting task 0.0 in stage 242.0 (TID 298) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:49.752 INFO  Executor - Running task 0.0 in stage 242.0 (TID 298)
20:33:49.764 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam:0+235514
20:33:49.764 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.765 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.766 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.766 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.766 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.767 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:49.769 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.770 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.771 WARN  DFSUtil - Unexpected value for data transfer bytes=231570 duration=0
20:33:49.771 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:49.773 INFO  Executor - Finished task 0.0 in stage 242.0 (TID 298). 650141 bytes result sent to driver
20:33:49.774 INFO  TaskSetManager - Finished task 0.0 in stage 242.0 (TID 298) in 22 ms on localhost (executor driver) (1/1)
20:33:49.774 INFO  TaskSchedulerImpl - Removed TaskSet 242.0, whose tasks have all completed, from pool 
20:33:49.774 INFO  DAGScheduler - ResultStage 242 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.029 s
20:33:49.775 INFO  DAGScheduler - Job 182 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:49.775 INFO  TaskSchedulerImpl - Killing all running tasks in stage 242: Stage finished
20:33:49.775 INFO  DAGScheduler - Job 182 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.030820 s
20:33:49.784 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:49.784 INFO  DAGScheduler - Got job 183 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:49.784 INFO  DAGScheduler - Final stage: ResultStage 243 (count at ReadsSparkSinkUnitTest.java:185)
20:33:49.784 INFO  DAGScheduler - Parents of final stage: List()
20:33:49.785 INFO  DAGScheduler - Missing parents: List()
20:33:49.785 INFO  DAGScheduler - Submitting ResultStage 243 (MapPartitionsRDD[1148] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:49.801 INFO  MemoryStore - Block broadcast_489 stored as values in memory (estimated size 426.1 KiB, free 1917.4 MiB)
20:33:49.803 INFO  MemoryStore - Block broadcast_489_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.2 MiB)
20:33:49.803 INFO  BlockManagerInfo - Added broadcast_489_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:49.803 INFO  SparkContext - Created broadcast 489 from broadcast at DAGScheduler.scala:1580
20:33:49.803 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 243 (MapPartitionsRDD[1148] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:49.803 INFO  TaskSchedulerImpl - Adding task set 243.0 with 1 tasks resource profile 0
20:33:49.804 INFO  TaskSetManager - Starting task 0.0 in stage 243.0 (TID 299) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7912 bytes) 
20:33:49.804 INFO  Executor - Running task 0.0 in stage 243.0 (TID 299)
20:33:49.834 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
20:33:49.845 INFO  Executor - Finished task 0.0 in stage 243.0 (TID 299). 989 bytes result sent to driver
20:33:49.846 INFO  TaskSetManager - Finished task 0.0 in stage 243.0 (TID 299) in 42 ms on localhost (executor driver) (1/1)
20:33:49.846 INFO  TaskSchedulerImpl - Removed TaskSet 243.0, whose tasks have all completed, from pool 
20:33:49.846 INFO  DAGScheduler - ResultStage 243 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.061 s
20:33:49.846 INFO  DAGScheduler - Job 183 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:49.846 INFO  TaskSchedulerImpl - Killing all running tasks in stage 243: Stage finished
20:33:49.846 INFO  DAGScheduler - Job 183 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.061910 s
20:33:49.849 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:49.850 INFO  DAGScheduler - Got job 184 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:49.850 INFO  DAGScheduler - Final stage: ResultStage 244 (count at ReadsSparkSinkUnitTest.java:185)
20:33:49.850 INFO  DAGScheduler - Parents of final stage: List()
20:33:49.850 INFO  DAGScheduler - Missing parents: List()
20:33:49.850 INFO  DAGScheduler - Submitting ResultStage 244 (MapPartitionsRDD[1166] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:49.856 INFO  MemoryStore - Block broadcast_490 stored as values in memory (estimated size 148.1 KiB, free 1917.1 MiB)
20:33:49.857 INFO  MemoryStore - Block broadcast_490_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.1 MiB)
20:33:49.857 INFO  BlockManagerInfo - Added broadcast_490_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.4 MiB)
20:33:49.857 INFO  SparkContext - Created broadcast 490 from broadcast at DAGScheduler.scala:1580
20:33:49.857 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 244 (MapPartitionsRDD[1166] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:49.857 INFO  TaskSchedulerImpl - Adding task set 244.0 with 1 tasks resource profile 0
20:33:49.858 INFO  TaskSetManager - Starting task 0.0 in stage 244.0 (TID 300) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:49.858 INFO  Executor - Running task 0.0 in stage 244.0 (TID 300)
20:33:49.870 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam:0+235514
20:33:49.871 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.871 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam	dst=null	perm=null	proto=rpc
20:33:49.872 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.872 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.873 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest2_5808be14-5f4d-47b9-b283-80618ab329db.bam.bai	dst=null	perm=null	proto=rpc
20:33:49.874 WARN  DFSUtil - Unexpected value for data transfer bytes=5760 duration=0
20:33:49.875 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.876 WARN  DFSUtil - Unexpected value for data transfer bytes=5516 duration=0
20:33:49.877 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:49.879 INFO  Executor - Finished task 0.0 in stage 244.0 (TID 300). 989 bytes result sent to driver
20:33:49.879 INFO  TaskSetManager - Finished task 0.0 in stage 244.0 (TID 300) in 21 ms on localhost (executor driver) (1/1)
20:33:49.879 INFO  TaskSchedulerImpl - Removed TaskSet 244.0, whose tasks have all completed, from pool 
20:33:49.879 INFO  DAGScheduler - ResultStage 244 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.029 s
20:33:49.879 INFO  DAGScheduler - Job 184 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:49.879 INFO  TaskSchedulerImpl - Killing all running tasks in stage 244: Stage finished
20:33:49.879 INFO  DAGScheduler - Job 184 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.029717 s
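Jobs 182-184 above are the read-back check for this round of readsSinkHDFSTest: the BAM just written to HDFS is collected and its record count compared against the original input. A hypothetical sketch of that comparison follows (generic element type; the class and method names are assumptions, not the test's actual code).

    import java.util.List;
    import org.apache.spark.api.java.JavaRDD;

    public final class ReadBackCheck {
        // Mirrors the collect/count pattern in the log: one job collects the
        // re-read records, two more count both RDDs, and the sizes must agree.
        public static <T> void assertSameSize(JavaRDD<T> original, JavaRDD<T> written) {
            List<T> writtenRecords = written.collect();  // "collect at ReadsSparkSinkUnitTest.java:182"
            long originalCount = original.count();       // "count at ReadsSparkSinkUnitTest.java:185"
            long writtenCount = written.count();         // second count job at the same line
            if (originalCount != writtenCount || writtenRecords.size() != writtenCount) {
                throw new AssertionError(
                        "round-trip mismatch: original=" + originalCount + ", written=" + writtenCount);
            }
        }
    }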
20:33:49.888 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:49.889 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:49.890 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:49.891 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:49.893 INFO  MemoryStore - Block broadcast_491 stored as values in memory (estimated size 298.0 KiB, free 1916.8 MiB)
20:33:49.899 INFO  MemoryStore - Block broadcast_491_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.7 MiB)
20:33:49.899 INFO  BlockManagerInfo - Added broadcast_491_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:49.900 INFO  SparkContext - Created broadcast 491 from newAPIHadoopFile at PathSplitSource.java:96
20:33:49.921 INFO  MemoryStore - Block broadcast_492 stored as values in memory (estimated size 298.0 KiB, free 1916.4 MiB)
20:33:49.927 INFO  MemoryStore - Block broadcast_492_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.4 MiB)
20:33:49.927 INFO  BlockManagerInfo - Added broadcast_492_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.3 MiB)
20:33:49.927 INFO  SparkContext - Created broadcast 492 from newAPIHadoopFile at PathSplitSource.java:96
20:33:49.947 INFO  FileInputFormat - Total input files to process : 1
20:33:49.949 INFO  MemoryStore - Block broadcast_493 stored as values in memory (estimated size 19.6 KiB, free 1916.4 MiB)
20:33:49.949 INFO  MemoryStore - Block broadcast_493_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.3 MiB)
20:33:49.949 INFO  BlockManagerInfo - Added broadcast_493_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.3 MiB)
20:33:49.949 INFO  SparkContext - Created broadcast 493 from broadcast at ReadsSparkSink.java:133
20:33:49.950 INFO  MemoryStore - Block broadcast_494 stored as values in memory (estimated size 20.0 KiB, free 1916.3 MiB)
20:33:49.955 INFO  MemoryStore - Block broadcast_494_piece0 stored as bytes in memory (estimated size 1890.0 B, free 1916.3 MiB)
20:33:49.955 INFO  BlockManagerInfo - Added broadcast_494_piece0 in memory on localhost:45281 (size: 1890.0 B, free: 1919.3 MiB)
20:33:49.955 INFO  BlockManagerInfo - Removed broadcast_486_piece0 on localhost:45281 in memory (size: 233.0 B, free: 1919.3 MiB)
20:33:49.955 INFO  SparkContext - Created broadcast 494 from broadcast at BamSink.java:76
20:33:49.955 INFO  BlockManagerInfo - Removed broadcast_489_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.4 MiB)
20:33:49.956 INFO  BlockManagerInfo - Removed broadcast_487_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:49.956 INFO  BlockManagerInfo - Removed broadcast_488_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.5 MiB)
20:33:49.956 INFO  BlockManagerInfo - Removed broadcast_484_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.7 MiB)
20:33:49.957 INFO  BlockManagerInfo - Removed broadcast_482_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:49.957 INFO  BlockManagerInfo - Removed broadcast_485_piece0 on localhost:45281 in memory (size: 67.1 KiB, free: 1919.8 MiB)
20:33:49.958 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:49.958 INFO  BlockManagerInfo - Removed broadcast_483_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.8 MiB)
20:33:49.958 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:49.958 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:49.958 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
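The three committer lines above show the write going through Hadoop's default FileOutputCommitter with commit algorithm version 1, i.e. task output is staged under _temporary and renamed into place at commit time. A small configuration sketch for a comparable local Spark job is below; the value shown matches the default reported in this log, and the app name is a placeholder.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public final class CommitterConfigExample {
        public static JavaSparkContext localContext() {
            // spark.hadoop.* settings are forwarded into the Hadoop Configuration,
            // so this pins the FileOutputCommitter algorithm the log reports (v1:
            // rename at job commit; v2 would rename at task commit instead).
            SparkConf conf = new SparkConf()
                    .setMaster("local[*]")
                    .setAppName("readsSinkExample")  // placeholder name
                    .set("spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version", "1");
            return new JavaSparkContext(conf);
        }
    }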
20:33:49.959 INFO  BlockManagerInfo - Removed broadcast_480_piece0 on localhost:45281 in memory (size: 50.3 KiB, free: 1919.8 MiB)
20:33:49.959 INFO  BlockManagerInfo - Removed broadcast_490_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.9 MiB)
20:33:49.959 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:49.959 INFO  BlockManagerInfo - Removed broadcast_492_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.9 MiB)
20:33:49.966 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:49.966 INFO  DAGScheduler - Registering RDD 1180 (mapToPair at SparkUtils.java:161) as input to shuffle 49
20:33:49.966 INFO  DAGScheduler - Got job 185 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:49.966 INFO  DAGScheduler - Final stage: ResultStage 246 (runJob at SparkHadoopWriter.scala:83)
20:33:49.966 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 245)
20:33:49.966 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 245)
20:33:49.966 INFO  DAGScheduler - Submitting ShuffleMapStage 245 (MapPartitionsRDD[1180] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:49.983 INFO  MemoryStore - Block broadcast_495 stored as values in memory (estimated size 434.3 KiB, free 1919.2 MiB)
20:33:49.985 INFO  MemoryStore - Block broadcast_495_piece0 stored as bytes in memory (estimated size 157.6 KiB, free 1919.0 MiB)
20:33:49.985 INFO  BlockManagerInfo - Added broadcast_495_piece0 in memory on localhost:45281 (size: 157.6 KiB, free: 1919.8 MiB)
20:33:49.985 INFO  SparkContext - Created broadcast 495 from broadcast at DAGScheduler.scala:1580
20:33:49.985 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 245 (MapPartitionsRDD[1180] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:49.985 INFO  TaskSchedulerImpl - Adding task set 245.0 with 1 tasks resource profile 0
20:33:49.986 INFO  TaskSetManager - Starting task 0.0 in stage 245.0 (TID 301) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7882 bytes) 
20:33:49.986 INFO  Executor - Running task 0.0 in stage 245.0 (TID 301)
20:33:50.016 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:50.029 INFO  Executor - Finished task 0.0 in stage 245.0 (TID 301). 1148 bytes result sent to driver
20:33:50.029 INFO  TaskSetManager - Finished task 0.0 in stage 245.0 (TID 301) in 43 ms on localhost (executor driver) (1/1)
20:33:50.029 INFO  TaskSchedulerImpl - Removed TaskSet 245.0, whose tasks have all completed, from pool 
20:33:50.029 INFO  DAGScheduler - ShuffleMapStage 245 (mapToPair at SparkUtils.java:161) finished in 0.062 s
20:33:50.029 INFO  DAGScheduler - looking for newly runnable stages
20:33:50.029 INFO  DAGScheduler - running: HashSet()
20:33:50.029 INFO  DAGScheduler - waiting: HashSet(ResultStage 246)
20:33:50.029 INFO  DAGScheduler - failed: HashSet()
20:33:50.029 INFO  DAGScheduler - Submitting ResultStage 246 (MapPartitionsRDD[1185] at mapToPair at BamSink.java:91), which has no missing parents
20:33:50.038 INFO  MemoryStore - Block broadcast_496 stored as values in memory (estimated size 155.4 KiB, free 1918.9 MiB)
20:33:50.039 INFO  MemoryStore - Block broadcast_496_piece0 stored as bytes in memory (estimated size 58.5 KiB, free 1918.8 MiB)
20:33:50.039 INFO  BlockManagerInfo - Added broadcast_496_piece0 in memory on localhost:45281 (size: 58.5 KiB, free: 1919.7 MiB)
20:33:50.039 INFO  SparkContext - Created broadcast 496 from broadcast at DAGScheduler.scala:1580
20:33:50.039 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 246 (MapPartitionsRDD[1185] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:50.039 INFO  TaskSchedulerImpl - Adding task set 246.0 with 1 tasks resource profile 0
20:33:50.040 INFO  TaskSetManager - Starting task 0.0 in stage 246.0 (TID 302) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:50.040 INFO  Executor - Running task 0.0 in stage 246.0 (TID 302)
20:33:50.044 INFO  ShuffleBlockFetcherIterator - Getting 1 (312.6 KiB) non-empty blocks including 1 (312.6 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:50.044 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:50.055 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:50.055 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:50.055 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:50.055 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:50.055 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:50.055 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:50.056 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.057 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.058 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.059 INFO  StateChange - BLOCK* allocate blk_1073741905_1081, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/part-r-00000
20:33:50.060 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741905_1081 src: /127.0.0.1:53998 dest: /127.0.0.1:35765
20:33:50.062 INFO  clienttrace - src: /127.0.0.1:53998, dest: /127.0.0.1:35765, bytes: 235299, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741905_1081, duration(ns): 1077047
20:33:50.062 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741905_1081, type=LAST_IN_PIPELINE terminating
20:33:50.063 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.063 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/part-r-00000	dst=null	perm=null	proto=rpc
20:33:50.064 INFO  StateChange - BLOCK* allocate blk_1073741906_1082, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.sbi
20:33:50.064 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741906_1082 src: /127.0.0.1:54006 dest: /127.0.0.1:35765
20:33:50.065 INFO  clienttrace - src: /127.0.0.1:54006, dest: /127.0.0.1:35765, bytes: 204, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741906_1082, duration(ns): 384820
20:33:50.066 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741906_1082, type=LAST_IN_PIPELINE terminating
20:33:50.066 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.067 INFO  StateChange - BLOCK* allocate blk_1073741907_1083, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.bai
20:33:50.068 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741907_1083 src: /127.0.0.1:54012 dest: /127.0.0.1:35765
20:33:50.069 INFO  clienttrace - src: /127.0.0.1:54012, dest: /127.0.0.1:35765, bytes: 592, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741907_1083, duration(ns): 384674
20:33:50.069 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741907_1083, type=LAST_IN_PIPELINE terminating
20:33:50.070 INFO  FSNamesystem - BLOCK* blk_1073741907_1083 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.bai
20:33:50.471 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0/.part-r-00000.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.472 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0	dst=null	perm=null	proto=rpc
20:33:50.472 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0	dst=null	perm=null	proto=rpc
20:33:50.473 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000	dst=null	perm=null	proto=rpc
20:33:50.474 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/_temporary/attempt_202507152033497317877549374352352_1185_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:50.474 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033497317877549374352352_1185_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000
20:33:50.474 INFO  SparkHadoopMapRedUtil - attempt_202507152033497317877549374352352_1185_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:50.474 INFO  Executor - Finished task 0.0 in stage 246.0 (TID 302). 1858 bytes result sent to driver
20:33:50.475 INFO  TaskSetManager - Finished task 0.0 in stage 246.0 (TID 302) in 435 ms on localhost (executor driver) (1/1)
20:33:50.475 INFO  TaskSchedulerImpl - Removed TaskSet 246.0, whose tasks have all completed, from pool 
20:33:50.475 INFO  DAGScheduler - ResultStage 246 (runJob at SparkHadoopWriter.scala:83) finished in 0.445 s
20:33:50.475 INFO  DAGScheduler - Job 185 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:50.475 INFO  TaskSchedulerImpl - Killing all running tasks in stage 246: Stage finished
20:33:50.475 INFO  DAGScheduler - Job 185 finished: runJob at SparkHadoopWriter.scala:83, took 0.509359 s
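Job 185 has the shape of a sort-then-write: a ShuffleMapStage built from a mapToPair (SparkUtils.java:161), then a ResultStage that writes each partition through the Hadoop output path (BamSink.java:91, committed by SparkHadoopWriter). The sketch below shows that generic key-sort-write pattern with plain Hadoop Text types; the key function and output format are stand-ins for illustration, not GATK's actual classes.

    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.spark.api.java.JavaRDD;
    import scala.Tuple2;

    public final class SortAndWriteExample {
        // Stage 1 (shuffle map): key each record; here the record is its own key.
        // Stage 2 (result): write the sorted partitions through a Hadoop
        // OutputFormat, which produces the part-r-00000 file and _SUCCESS marker.
        public static void sortAndWrite(JavaRDD<String> records, String outputDir) {
            records.mapToPair(r -> new Tuple2<>(r, r))
                   .sortByKey()
                   .mapToPair(kv -> new Tuple2<>(new Text(kv._2()), NullWritable.get()))
                   .saveAsNewAPIHadoopFile(
                           outputDir,               // e.g. an hdfs://.../example.bam.parts directory (placeholder)
                           Text.class,
                           NullWritable.class,
                           TextOutputFormat.class);
        }
    }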
20:33:50.475 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033497317877549374352352_1185.
20:33:50.476 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:50.476 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:50.477 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000	dst=null	perm=null	proto=rpc
20:33:50.477 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:50.478 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000/.part-r-00000.bai	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.bai	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.478 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:50.478 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000/.part-r-00000.sbi	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.sbi	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.479 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:50.479 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary/0/task_202507152033497317877549374352352_1185_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.480 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:50.480 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.481 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.481 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.spark-staging-1185	dst=null	perm=null	proto=rpc
20:33:50.482 INFO  SparkHadoopWriter - Write Job job_202507152033497317877549374352352_1185 committed. Elapsed time: 6 ms.
20:33:50.482 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.483 INFO  StateChange - BLOCK* allocate blk_1073741908_1084, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/header
20:33:50.484 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741908_1084 src: /127.0.0.1:54020 dest: /127.0.0.1:35765
20:33:50.485 INFO  clienttrace - src: /127.0.0.1:54020, dest: /127.0.0.1:35765, bytes: 1190, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741908_1084, duration(ns): 484959
20:33:50.485 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741908_1084, type=LAST_IN_PIPELINE terminating
20:33:50.486 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.487 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.488 INFO  StateChange - BLOCK* allocate blk_1073741909_1085, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/terminator
20:33:50.488 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741909_1085 src: /127.0.0.1:54036 dest: /127.0.0.1:35765
20:33:50.490 INFO  clienttrace - src: /127.0.0.1:54036, dest: /127.0.0.1:35765, bytes: 28, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741909_1085, duration(ns): 418583
20:33:50.490 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741909_1085, type=LAST_IN_PIPELINE terminating
20:33:50.490 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.491 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:50.492 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.492 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.492 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam
20:33:50.493 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/header, /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.493 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:50.494 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:50.494 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.494 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam done
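The concat/rename sequence above is how the single output BAM is assembled: an empty "output" file is created in the .parts directory, the header block, the part-r-00000 body, and the 28-byte terminator are concatenated into it, and the result is renamed over the final path. A rough sketch of that pattern with the Hadoop FileSystem API is below; the paths are placeholders, and concat() is HDFS-specific with version-dependent preconditions, so this is illustrative only.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class ConcatPartsExample {
        // Mirrors the audit trail above: create ".parts/output", concatenate the
        // header / part-r-00000 / terminator files into it, then rename it over
        // the final .bam path (after deleting the placeholder .bam).
        public static void assemble(Configuration conf, String partsDir, String finalBam) throws IOException {
            FileSystem fs = FileSystem.get(conf);
            Path target = new Path(partsDir, "output");
            fs.create(target).close();                          // empty target, as in the log
            fs.concat(target, new Path[] {
                    new Path(partsDir, "header"),
                    new Path(partsDir, "part-r-00000"),
                    new Path(partsDir, "terminator")});
            fs.delete(new Path(finalBam), false);               // drop the placeholder .bam
            fs.rename(target, new Path(finalBam));
        }
    }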
20:33:50.495 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:50.495 INFO  IndexFileMerger - Merging .sbi files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi
20:33:50.495 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:50.496 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.497 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:50.497 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:50.498 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:50.498 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.sbi	dst=null	perm=null	proto=rpc
20:33:50.499 INFO  StateChange - BLOCK* allocate blk_1073741910_1086, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi
20:33:50.500 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741910_1086 src: /127.0.0.1:54038 dest: /127.0.0.1:35765
20:33:50.501 INFO  clienttrace - src: /127.0.0.1:54038, dest: /127.0.0.1:35765, bytes: 204, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741910_1086, duration(ns): 420275
20:33:50.501 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741910_1086, type=LAST_IN_PIPELINE terminating
20:33:50.502 INFO  FSNamesystem - BLOCK* blk_1073741910_1086 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi
20:33:50.902 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:50.903 INFO  IndexFileMerger - Done merging .sbi files
20:33:50.903 INFO  IndexFileMerger - Merging .bai files in temp directory hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/ to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai
20:33:50.903 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:50.904 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:50.905 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:50.905 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:50.906 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:50.907 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts/.part-r-00000.bai	dst=null	perm=null	proto=rpc
20:33:50.908 INFO  StateChange - BLOCK* allocate blk_1073741911_1087, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai
20:33:50.908 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741911_1087 src: /127.0.0.1:54044 dest: /127.0.0.1:35765
20:33:50.909 INFO  clienttrace - src: /127.0.0.1:54044, dest: /127.0.0.1:35765, bytes: 592, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741911_1087, duration(ns): 388836
20:33:50.909 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741911_1087, type=LAST_IN_PIPELINE terminating
20:33:50.910 INFO  FSNamesystem - BLOCK* blk_1073741911_1087 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai
20:33:51.311 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:51.311 INFO  IndexFileMerger - Done merging .bai files
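IndexFileMerger then folds the per-part .sbi and .bai files left in the .parts directory into single companion indexes next to the BAM. Real index merging also has to rewrite index contents (for example virtual file offsets), so the sketch below only shows the HDFS file plumbing visible in the log: glob the hidden .part-*.sbi/.bai files, stream them into one target, and delete the parts. Class and parameter names are placeholders.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public final class IndexPartPlumbing {
        // File plumbing only: copies every ".part-*<ext>" under partsDir into a
        // single target file, then removes the parts (matching the create/open/
        // delete audit lines above). Index-specific rewriting is omitted.
        public static void mergeParts(Configuration conf, String partsDir, String targetIndex, String ext)
                throws IOException {
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] parts = fs.globStatus(new Path(partsDir, ".part-*" + ext));
            try (FSDataOutputStream out = fs.create(new Path(targetIndex))) {
                if (parts != null) {
                    for (FileStatus part : parts) {
                        try (FSDataInputStream in = fs.open(part.getPath())) {
                            IOUtils.copyBytes(in, out, 64 * 1024, false);  // keep 'out' open across parts
                        }
                        fs.delete(part.getPath(), false);
                    }
                }
            }
        }
    }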
20:33:51.311 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.parts	dst=null	perm=null	proto=rpc
20:33:51.320 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.328 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.328 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.329 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.329 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:51.330 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.330 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.330 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.331 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.331 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.332 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.332 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.333 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:51.334 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.335 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.335 WARN  DFSUtil - Unexpected value for data transfer bytes=237139 duration=0
20:33:51.335 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.335 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.336 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.sbi	dst=null	perm=null	proto=rpc
20:33:51.336 WARN  DFSUtil - Unexpected value for data transfer bytes=208 duration=0
20:33:51.337 INFO  MemoryStore - Block broadcast_497 stored as values in memory (estimated size 312.0 B, free 1918.8 MiB)
20:33:51.337 INFO  MemoryStore - Block broadcast_497_piece0 stored as bytes in memory (estimated size 231.0 B, free 1918.8 MiB)
20:33:51.337 INFO  BlockManagerInfo - Added broadcast_497_piece0 in memory on localhost:45281 (size: 231.0 B, free: 1919.7 MiB)
20:33:51.337 INFO  SparkContext - Created broadcast 497 from broadcast at BamSource.java:104
20:33:51.339 INFO  MemoryStore - Block broadcast_498 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:51.350 INFO  MemoryStore - Block broadcast_498_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.5 MiB)
20:33:51.350 INFO  BlockManagerInfo - Added broadcast_498_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:51.350 INFO  SparkContext - Created broadcast 498 from newAPIHadoopFile at PathSplitSource.java:96
20:33:51.364 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.364 INFO  FileInputFormat - Total input files to process : 1
20:33:51.364 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.384 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:51.384 INFO  DAGScheduler - Got job 186 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:51.384 INFO  DAGScheduler - Final stage: ResultStage 247 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:51.384 INFO  DAGScheduler - Parents of final stage: List()
20:33:51.384 INFO  DAGScheduler - Missing parents: List()
20:33:51.385 INFO  DAGScheduler - Submitting ResultStage 247 (MapPartitionsRDD[1191] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:51.391 INFO  MemoryStore - Block broadcast_499 stored as values in memory (estimated size 148.2 KiB, free 1918.3 MiB)
20:33:51.391 INFO  MemoryStore - Block broadcast_499_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1918.3 MiB)
20:33:51.391 INFO  BlockManagerInfo - Added broadcast_499_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.6 MiB)
20:33:51.392 INFO  SparkContext - Created broadcast 499 from broadcast at DAGScheduler.scala:1580
20:33:51.392 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 247 (MapPartitionsRDD[1191] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:51.392 INFO  TaskSchedulerImpl - Adding task set 247.0 with 1 tasks resource profile 0
20:33:51.392 INFO  TaskSetManager - Starting task 0.0 in stage 247.0 (TID 303) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:51.392 INFO  Executor - Running task 0.0 in stage 247.0 (TID 303)
20:33:51.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741906_1082 replica FinalizedReplica, blk_1073741906_1082, FINALIZED
  getNumBytes()     = 204
  getBytesOnDisk()  = 204
  getVisibleLength()= 204
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741906 for deletion
20:33:51.403 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741899_1075 replica FinalizedReplica, blk_1073741899_1075, FINALIZED
  getNumBytes()     = 212
  getBytesOnDisk()  = 212
  getVisibleLength()= 212
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data1
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741899 for deletion
20:33:51.403 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741900_1076 replica FinalizedReplica, blk_1073741900_1076, FINALIZED
  getNumBytes()     = 5472
  getBytesOnDisk()  = 5472
  getVisibleLength()= 5472
  getVolume()       = /tmp/minicluster_storage11240959748026123074/data/data2
  getBlockURI()     = file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741900 for deletion
20:33:51.403 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741906_1082 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741906
20:33:51.403 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741899_1075 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741899
20:33:51.403 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741900_1076 URI file:/tmp/minicluster_storage11240959748026123074/data/data2/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741900
20:33:51.409 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam:0+236517
20:33:51.410 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.410 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.411 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.412 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.412 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.413 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:51.414 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.415 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.417 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:51.420 INFO  Executor - Finished task 0.0 in stage 247.0 (TID 303). 749470 bytes result sent to driver
20:33:51.421 INFO  TaskSetManager - Finished task 0.0 in stage 247.0 (TID 303) in 29 ms on localhost (executor driver) (1/1)
20:33:51.421 INFO  TaskSchedulerImpl - Removed TaskSet 247.0, whose tasks have all completed, from pool 
20:33:51.421 INFO  DAGScheduler - ResultStage 247 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.036 s
20:33:51.421 INFO  DAGScheduler - Job 186 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:51.421 INFO  TaskSchedulerImpl - Killing all running tasks in stage 247: Stage finished
20:33:51.421 INFO  DAGScheduler - Job 186 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.037245 s
20:33:51.437 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:51.437 INFO  DAGScheduler - Got job 187 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:51.437 INFO  DAGScheduler - Final stage: ResultStage 248 (count at ReadsSparkSinkUnitTest.java:185)
20:33:51.437 INFO  DAGScheduler - Parents of final stage: List()
20:33:51.437 INFO  DAGScheduler - Missing parents: List()
20:33:51.437 INFO  DAGScheduler - Submitting ResultStage 248 (MapPartitionsRDD[1173] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:51.454 INFO  MemoryStore - Block broadcast_500 stored as values in memory (estimated size 426.1 KiB, free 1917.9 MiB)
20:33:51.456 INFO  MemoryStore - Block broadcast_500_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1917.7 MiB)
20:33:51.456 INFO  BlockManagerInfo - Added broadcast_500_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.5 MiB)
20:33:51.456 INFO  SparkContext - Created broadcast 500 from broadcast at DAGScheduler.scala:1580
20:33:51.456 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 248 (MapPartitionsRDD[1173] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:51.456 INFO  TaskSchedulerImpl - Adding task set 248.0 with 1 tasks resource profile 0
20:33:51.457 INFO  TaskSetManager - Starting task 0.0 in stage 248.0 (TID 304) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7893 bytes) 
20:33:51.457 INFO  Executor - Running task 0.0 in stage 248.0 (TID 304)
20:33:51.487 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam:0+211123
20:33:51.494 INFO  Executor - Finished task 0.0 in stage 248.0 (TID 304). 989 bytes result sent to driver
20:33:51.494 INFO  TaskSetManager - Finished task 0.0 in stage 248.0 (TID 304) in 37 ms on localhost (executor driver) (1/1)
20:33:51.494 INFO  TaskSchedulerImpl - Removed TaskSet 248.0, whose tasks have all completed, from pool 
20:33:51.494 INFO  DAGScheduler - ResultStage 248 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.056 s
20:33:51.494 INFO  DAGScheduler - Job 187 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:51.494 INFO  TaskSchedulerImpl - Killing all running tasks in stage 248: Stage finished
20:33:51.494 INFO  DAGScheduler - Job 187 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.057245 s
20:33:51.498 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:51.498 INFO  DAGScheduler - Got job 188 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:51.498 INFO  DAGScheduler - Final stage: ResultStage 249 (count at ReadsSparkSinkUnitTest.java:185)
20:33:51.498 INFO  DAGScheduler - Parents of final stage: List()
20:33:51.498 INFO  DAGScheduler - Missing parents: List()
20:33:51.498 INFO  DAGScheduler - Submitting ResultStage 249 (MapPartitionsRDD[1191] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:51.504 INFO  MemoryStore - Block broadcast_501 stored as values in memory (estimated size 148.1 KiB, free 1917.6 MiB)
20:33:51.505 INFO  MemoryStore - Block broadcast_501_piece0 stored as bytes in memory (estimated size 54.6 KiB, free 1917.5 MiB)
20:33:51.505 INFO  BlockManagerInfo - Added broadcast_501_piece0 in memory on localhost:45281 (size: 54.6 KiB, free: 1919.4 MiB)
20:33:51.505 INFO  SparkContext - Created broadcast 501 from broadcast at DAGScheduler.scala:1580
20:33:51.505 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 249 (MapPartitionsRDD[1191] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:51.505 INFO  TaskSchedulerImpl - Adding task set 249.0 with 1 tasks resource profile 0
20:33:51.506 INFO  TaskSetManager - Starting task 0.0 in stage 249.0 (TID 305) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:51.506 INFO  Executor - Running task 0.0 in stage 249.0 (TID 305)
20:33:51.518 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam:0+236517
20:33:51.518 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.519 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam	dst=null	perm=null	proto=rpc
20:33:51.519 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.520 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.520 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest3_f12d6059-6b3d-4ecd-a7aa-555d4906a87e.bam.bai	dst=null	perm=null	proto=rpc
20:33:51.521 WARN  DFSUtil - Unexpected value for data transfer bytes=1202 duration=0
20:33:51.522 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.523 WARN  DFSUtil - Unexpected value for data transfer bytes=600 duration=0
20:33:51.524 WARN  DFSUtil - Unexpected value for data transfer bytes=237139 duration=0
20:33:51.524 WARN  DFSUtil - Unexpected value for data transfer bytes=32 duration=0
20:33:51.526 INFO  Executor - Finished task 0.0 in stage 249.0 (TID 305). 989 bytes result sent to driver
20:33:51.526 INFO  TaskSetManager - Finished task 0.0 in stage 249.0 (TID 305) in 20 ms on localhost (executor driver) (1/1)
20:33:51.526 INFO  TaskSchedulerImpl - Removed TaskSet 249.0, whose tasks have all completed, from pool 
20:33:51.526 INFO  DAGScheduler - ResultStage 249 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.028 s
20:33:51.526 INFO  DAGScheduler - Job 188 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:51.526 INFO  TaskSchedulerImpl - Killing all running tasks in stage 249: Stage finished
20:33:51.526 INFO  DAGScheduler - Job 188 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.028583 s
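The two jobs above (the `collect` at ReadsSparkSinkUnitTest.java:182 and the `count` at line 185) re-read the freshly written BAM back from HDFS and compare it against the input. As a hedged illustration of that kind of round-trip check (not the actual test code; file paths and the class name are hypothetical, and it assumes local copies of the files):

```java
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import org.testng.Assert;

import java.io.File;

public class RoundTripCountCheck {
    // Count the records in a BAM/SAM file with htsjdk.
    static long countReads(final File file) throws Exception {
        try (final SamReader reader = SamReaderFactory.makeDefault()
                .validationStringency(ValidationStringency.SILENT)
                .open(file)) {
            long n = 0;
            for (final SAMRecord rec : reader) {
                n++;
            }
            return n;
        }
    }

    public static void main(final String[] args) throws Exception {
        final File original = new File(args[0]);      // the input BAM (hypothetical local path)
        final File roundTripped = new File(args[1]);  // the file written by the sink (hypothetical local copy)
        Assert.assertEquals(countReads(roundTripped), countReads(original),
                "read count should survive the write/read round trip");
    }
}
```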
20:33:51.535 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:51.536 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:51.536 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:51.537 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:51.539 INFO  MemoryStore - Block broadcast_502 stored as values in memory (estimated size 576.0 B, free 1917.5 MiB)
20:33:51.539 INFO  MemoryStore - Block broadcast_502_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.5 MiB)
20:33:51.539 INFO  BlockManagerInfo - Added broadcast_502_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.4 MiB)
20:33:51.539 INFO  SparkContext - Created broadcast 502 from broadcast at CramSource.java:114
20:33:51.540 INFO  MemoryStore - Block broadcast_503 stored as values in memory (estimated size 297.9 KiB, free 1917.2 MiB)
20:33:51.546 INFO  MemoryStore - Block broadcast_503_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1917.2 MiB)
20:33:51.546 INFO  BlockManagerInfo - Added broadcast_503_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:51.546 INFO  SparkContext - Created broadcast 503 from newAPIHadoopFile at PathSplitSource.java:96
20:33:51.561 INFO  MemoryStore - Block broadcast_504 stored as values in memory (estimated size 576.0 B, free 1917.2 MiB)
20:33:51.565 INFO  MemoryStore - Block broadcast_504_piece0 stored as bytes in memory (estimated size 228.0 B, free 1917.2 MiB)
20:33:51.565 INFO  BlockManagerInfo - Added broadcast_504_piece0 in memory on localhost:45281 (size: 228.0 B, free: 1919.4 MiB)
20:33:51.565 INFO  BlockManagerInfo - Removed broadcast_497_piece0 on localhost:45281 in memory (size: 231.0 B, free: 1919.4 MiB)
20:33:51.566 INFO  SparkContext - Created broadcast 504 from broadcast at CramSource.java:114
20:33:51.566 INFO  BlockManagerInfo - Removed broadcast_495_piece0 on localhost:45281 in memory (size: 157.6 KiB, free: 1919.5 MiB)
20:33:51.566 INFO  BlockManagerInfo - Removed broadcast_500_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.7 MiB)
20:33:51.567 INFO  BlockManagerInfo - Removed broadcast_501_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.7 MiB)
20:33:51.567 INFO  MemoryStore - Block broadcast_505 stored as values in memory (estimated size 297.9 KiB, free 1918.2 MiB)
20:33:51.567 INFO  BlockManagerInfo - Removed broadcast_494_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.7 MiB)
20:33:51.567 INFO  BlockManagerInfo - Removed broadcast_493_piece0 on localhost:45281 in memory (size: 1890.0 B, free: 1919.7 MiB)
20:33:51.568 INFO  BlockManagerInfo - Removed broadcast_498_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:51.568 INFO  BlockManagerInfo - Removed broadcast_496_piece0 on localhost:45281 in memory (size: 58.5 KiB, free: 1919.8 MiB)
20:33:51.569 INFO  BlockManagerInfo - Removed broadcast_499_piece0 on localhost:45281 in memory (size: 54.6 KiB, free: 1919.9 MiB)
20:33:51.569 INFO  BlockManagerInfo - Removed broadcast_491_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1920.0 MiB)
20:33:51.574 INFO  MemoryStore - Block broadcast_505_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1919.3 MiB)
20:33:51.574 INFO  BlockManagerInfo - Added broadcast_505_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.9 MiB)
20:33:51.574 INFO  SparkContext - Created broadcast 505 from newAPIHadoopFile at PathSplitSource.java:96
20:33:51.588 INFO  FileInputFormat - Total input files to process : 1
20:33:51.589 INFO  MemoryStore - Block broadcast_506 stored as values in memory (estimated size 6.0 KiB, free 1919.3 MiB)
20:33:51.590 INFO  MemoryStore - Block broadcast_506_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:51.590 INFO  BlockManagerInfo - Added broadcast_506_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:51.590 INFO  SparkContext - Created broadcast 506 from broadcast at ReadsSparkSink.java:133
20:33:51.590 INFO  MemoryStore - Block broadcast_507 stored as values in memory (estimated size 6.2 KiB, free 1919.3 MiB)
20:33:51.591 INFO  MemoryStore - Block broadcast_507_piece0 stored as bytes in memory (estimated size 1473.0 B, free 1919.3 MiB)
20:33:51.591 INFO  BlockManagerInfo - Added broadcast_507_piece0 in memory on localhost:45281 (size: 1473.0 B, free: 1919.9 MiB)
20:33:51.591 INFO  SparkContext - Created broadcast 507 from broadcast at CramSink.java:76
20:33:51.593 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts	dst=null	perm=null	proto=rpc
20:33:51.593 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:51.593 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:51.593 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:51.594 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:51.600 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:51.601 INFO  DAGScheduler - Registering RDD 1203 (mapToPair at SparkUtils.java:161) as input to shuffle 50
20:33:51.601 INFO  DAGScheduler - Got job 189 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:51.601 INFO  DAGScheduler - Final stage: ResultStage 251 (runJob at SparkHadoopWriter.scala:83)
20:33:51.601 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 250)
20:33:51.601 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 250)
20:33:51.601 INFO  DAGScheduler - Submitting ShuffleMapStage 250 (MapPartitionsRDD[1203] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:51.613 INFO  MemoryStore - Block broadcast_508 stored as values in memory (estimated size 292.8 KiB, free 1919.0 MiB)
20:33:51.614 INFO  MemoryStore - Block broadcast_508_piece0 stored as bytes in memory (estimated size 107.3 KiB, free 1918.9 MiB)
20:33:51.614 INFO  BlockManagerInfo - Added broadcast_508_piece0 in memory on localhost:45281 (size: 107.3 KiB, free: 1919.8 MiB)
20:33:51.614 INFO  SparkContext - Created broadcast 508 from broadcast at DAGScheduler.scala:1580
20:33:51.614 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 250 (MapPartitionsRDD[1203] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:51.614 INFO  TaskSchedulerImpl - Adding task set 250.0 with 1 tasks resource profile 0
20:33:51.615 INFO  TaskSetManager - Starting task 0.0 in stage 250.0 (TID 306) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7869 bytes) 
20:33:51.615 INFO  Executor - Running task 0.0 in stage 250.0 (TID 306)
20:33:51.636 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:51.646 INFO  Executor - Finished task 0.0 in stage 250.0 (TID 306). 1148 bytes result sent to driver
20:33:51.646 INFO  TaskSetManager - Finished task 0.0 in stage 250.0 (TID 306) in 31 ms on localhost (executor driver) (1/1)
20:33:51.646 INFO  TaskSchedulerImpl - Removed TaskSet 250.0, whose tasks have all completed, from pool 
20:33:51.646 INFO  DAGScheduler - ShuffleMapStage 250 (mapToPair at SparkUtils.java:161) finished in 0.045 s
20:33:51.646 INFO  DAGScheduler - looking for newly runnable stages
20:33:51.646 INFO  DAGScheduler - running: HashSet()
20:33:51.646 INFO  DAGScheduler - waiting: HashSet(ResultStage 251)
20:33:51.646 INFO  DAGScheduler - failed: HashSet()
20:33:51.647 INFO  DAGScheduler - Submitting ResultStage 251 (MapPartitionsRDD[1208] at mapToPair at CramSink.java:89), which has no missing parents
20:33:51.653 INFO  MemoryStore - Block broadcast_509 stored as values in memory (estimated size 153.3 KiB, free 1918.8 MiB)
20:33:51.654 INFO  MemoryStore - Block broadcast_509_piece0 stored as bytes in memory (estimated size 58.1 KiB, free 1918.7 MiB)
20:33:51.654 INFO  BlockManagerInfo - Added broadcast_509_piece0 in memory on localhost:45281 (size: 58.1 KiB, free: 1919.7 MiB)
20:33:51.654 INFO  SparkContext - Created broadcast 509 from broadcast at DAGScheduler.scala:1580
20:33:51.654 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 251 (MapPartitionsRDD[1208] at mapToPair at CramSink.java:89) (first 15 tasks are for partitions Vector(0))
20:33:51.654 INFO  TaskSchedulerImpl - Adding task set 251.0 with 1 tasks resource profile 0
20:33:51.655 INFO  TaskSetManager - Starting task 0.0 in stage 251.0 (TID 307) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:51.655 INFO  Executor - Running task 0.0 in stage 251.0 (TID 307)
20:33:51.659 INFO  ShuffleBlockFetcherIterator - Getting 1 (82.3 KiB) non-empty blocks including 1 (82.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:51.659 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:51.665 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:51.665 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:51.665 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:51.665 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:51.665 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:51.665 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:51.666 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0/part-r-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:51.687 INFO  StateChange - BLOCK* allocate blk_1073741912_1088, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0/part-r-00000
20:33:51.688 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741912_1088 src: /127.0.0.1:54076 dest: /127.0.0.1:35765
20:33:51.689 INFO  clienttrace - src: /127.0.0.1:54076, dest: /127.0.0.1:35765, bytes: 42659, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741912_1088, duration(ns): 493479
20:33:51.689 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741912_1088, type=LAST_IN_PIPELINE terminating
20:33:51.690 INFO  FSNamesystem - BLOCK* blk_1073741912_1088 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0/part-r-00000
20:33:52.091 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0/part-r-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:52.091 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0	dst=null	perm=null	proto=rpc
20:33:52.092 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0	dst=null	perm=null	proto=rpc
20:33:52.092 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/task_202507152033518541313951951214526_1208_r_000000	dst=null	perm=null	proto=rpc
20:33:52.093 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/_temporary/attempt_202507152033518541313951951214526_1208_r_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/task_202507152033518541313951951214526_1208_r_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:52.093 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033518541313951951214526_1208_r_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/task_202507152033518541313951951214526_1208_r_000000
20:33:52.093 INFO  SparkHadoopMapRedUtil - attempt_202507152033518541313951951214526_1208_r_000000_0: Committed. Elapsed time: 1 ms.
20:33:52.094 INFO  Executor - Finished task 0.0 in stage 251.0 (TID 307). 1858 bytes result sent to driver
20:33:52.094 INFO  TaskSetManager - Finished task 0.0 in stage 251.0 (TID 307) in 439 ms on localhost (executor driver) (1/1)
20:33:52.094 INFO  TaskSchedulerImpl - Removed TaskSet 251.0, whose tasks have all completed, from pool 
20:33:52.094 INFO  DAGScheduler - ResultStage 251 (runJob at SparkHadoopWriter.scala:83) finished in 0.447 s
20:33:52.094 INFO  DAGScheduler - Job 189 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:52.094 INFO  TaskSchedulerImpl - Killing all running tasks in stage 251: Stage finished
20:33:52.094 INFO  DAGScheduler - Job 189 finished: runJob at SparkHadoopWriter.scala:83, took 0.494365 s
20:33:52.095 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033518541313951951214526_1208.
20:33:52.095 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:52.095 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts	dst=null	perm=null	proto=rpc
20:33:52.096 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/task_202507152033518541313951951214526_1208_r_000000	dst=null	perm=null	proto=rpc
20:33:52.096 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/part-r-00000	dst=null	perm=null	proto=rpc
20:33:52.097 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary/0/task_202507152033518541313951951214526_1208_r_000000/part-r-00000	dst=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/part-r-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.097 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:52.098 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.099 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:52.099 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/.spark-staging-1208	dst=null	perm=null	proto=rpc
20:33:52.099 INFO  SparkHadoopWriter - Write Job job_202507152033518541313951951214526_1208 committed. Elapsed time: 4 ms.
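The "File Output Committer Algorithm version is 1" lines above correspond to the classic commit path visible in the audit log: task attempts write under `_temporary`, are renamed into task directories, and the job commit promotes the part file and writes `_SUCCESS`. As a hedged sketch of how a Spark job can pin that algorithm version (the Hadoop property name is standard; the surrounding setup is illustrative, not taken from GATK):

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class CommitterConfigSketch {
    public static void main(final String[] args) {
        final SparkConf conf = new SparkConf()
                .setMaster("local[2]")
                .setAppName("committer-config-sketch")
                // v1 commits by renaming task-attempt directories under _temporary, as in the audit trail above
                .set("spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version", "1");
        try (final JavaSparkContext ctx = new JavaSparkContext(conf)) {
            // ... any saveAsTextFile / saveAsNewAPIHadoopFile call would pick this setting up ...
        }
    }
}
```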
20:33:52.100 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.101 INFO  StateChange - BLOCK* allocate blk_1073741913_1089, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/header
20:33:52.102 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741913_1089 src: /127.0.0.1:54086 dest: /127.0.0.1:35765
20:33:52.103 INFO  clienttrace - src: /127.0.0.1:54086, dest: /127.0.0.1:35765, bytes: 1016, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741913_1089, duration(ns): 438540
20:33:52.103 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741913_1089, type=LAST_IN_PIPELINE terminating
20:33:52.104 INFO  FSNamesystem - BLOCK* blk_1073741913_1089 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/header
20:33:52.505 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:52.506 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/terminator	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.507 INFO  StateChange - BLOCK* allocate blk_1073741914_1090, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/terminator
20:33:52.508 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741914_1090 src: /127.0.0.1:54096 dest: /127.0.0.1:35765
20:33:52.509 INFO  clienttrace - src: /127.0.0.1:54096, dest: /127.0.0.1:35765, bytes: 38, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741914_1090, duration(ns): 397291
20:33:52.509 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741914_1090, type=LAST_IN_PIPELINE terminating
20:33:52.509 INFO  FSNamesystem - BLOCK* blk_1073741914_1090 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/terminator
20:33:52.910 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/terminator is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:52.911 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts	dst=null	perm=null	proto=rpc
20:33:52.912 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.912 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:52.913 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram
20:33:52.913 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/header, /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/part-r-00000, /user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/terminator]	dst=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.913 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.914 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.914 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:52.914 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram done
20:33:52.915 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.parts	dst=null	perm=null	proto=rpc
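The audit lines above show the final assembly step: `header`, `part-r-00000`, and `terminator` are concatenated into `output` inside the `.cram.parts` directory, the result is renamed over the target `.cram`, and the parts directory is deleted. A minimal, hedged sketch of the HDFS calls involved (paths are hypothetical, and HDFS `concat` has restrictions such as same-directory sources; this is not GATK's HadoopFileSystemWrapper itself):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ConcatPartsSketch {
    public static void main(final String[] args) throws Exception {
        final FileSystem fs = FileSystem.get(new Configuration());
        final Path parts = new Path("/user/runner/example.cram.parts");  // hypothetical
        final Path target = new Path(parts, "output");
        final Path finalFile = new Path("/user/runner/example.cram");    // hypothetical

        // Create the empty target, then splice the pieces onto it in order.
        fs.create(target).close();
        fs.concat(target, new Path[] {
                new Path(parts, "header"),
                new Path(parts, "part-r-00000"),
                new Path(parts, "terminator")
        });

        // Promote the concatenated file over the final name, then remove the parts directory.
        fs.delete(finalFile, false);
        fs.rename(target, finalFile);
        fs.delete(parts, true);
    }
}
```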
20:33:52.915 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.915 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.915 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.916 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.916 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.crai	dst=null	perm=null	proto=rpc
20:33:52.917 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.crai	dst=null	perm=null	proto=rpc
20:33:52.919 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:52.919 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:52.920 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.920 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.920 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.crai	dst=null	perm=null	proto=rpc
20:33:52.921 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.crai	dst=null	perm=null	proto=rpc
20:33:52.921 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.921 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.922 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:52.922 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:52.923 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:52.923 INFO  MemoryStore - Block broadcast_510 stored as values in memory (estimated size 528.0 B, free 1918.7 MiB)
20:33:52.923 INFO  MemoryStore - Block broadcast_510_piece0 stored as bytes in memory (estimated size 187.0 B, free 1918.7 MiB)
20:33:52.923 INFO  BlockManagerInfo - Added broadcast_510_piece0 in memory on localhost:45281 (size: 187.0 B, free: 1919.7 MiB)
20:33:52.924 INFO  SparkContext - Created broadcast 510 from broadcast at CramSource.java:114
20:33:52.925 INFO  MemoryStore - Block broadcast_511 stored as values in memory (estimated size 297.9 KiB, free 1918.4 MiB)
20:33:52.931 INFO  MemoryStore - Block broadcast_511_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.4 MiB)
20:33:52.931 INFO  BlockManagerInfo - Added broadcast_511_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:52.931 INFO  SparkContext - Created broadcast 511 from newAPIHadoopFile at PathSplitSource.java:96
20:33:52.945 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.946 INFO  FileInputFormat - Total input files to process : 1
20:33:52.946 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:52.972 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:52.972 INFO  DAGScheduler - Got job 190 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:52.972 INFO  DAGScheduler - Final stage: ResultStage 252 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:52.972 INFO  DAGScheduler - Parents of final stage: List()
20:33:52.972 INFO  DAGScheduler - Missing parents: List()
20:33:52.973 INFO  DAGScheduler - Submitting ResultStage 252 (MapPartitionsRDD[1214] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:52.984 INFO  MemoryStore - Block broadcast_512 stored as values in memory (estimated size 286.8 KiB, free 1918.1 MiB)
20:33:52.985 INFO  MemoryStore - Block broadcast_512_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1918.0 MiB)
20:33:52.985 INFO  BlockManagerInfo - Added broadcast_512_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.6 MiB)
20:33:52.985 INFO  SparkContext - Created broadcast 512 from broadcast at DAGScheduler.scala:1580
20:33:52.985 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 252 (MapPartitionsRDD[1214] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:52.985 INFO  TaskSchedulerImpl - Adding task set 252.0 with 1 tasks resource profile 0
20:33:52.986 INFO  TaskSetManager - Starting task 0.0 in stage 252.0 (TID 308) (localhost, executor driver, partition 0, ANY, 7853 bytes) 
20:33:52.986 INFO  Executor - Running task 0.0 in stage 252.0 (TID 308)
20:33:53.007 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram:0+43713
20:33:53.007 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:53.008 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:53.008 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.crai	dst=null	perm=null	proto=rpc
20:33:53.009 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.crai	dst=null	perm=null	proto=rpc
20:33:53.010 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:53.011 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:53.011 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:53.023 INFO  Executor - Finished task 0.0 in stage 252.0 (TID 308). 154101 bytes result sent to driver
20:33:53.024 INFO  TaskSetManager - Finished task 0.0 in stage 252.0 (TID 308) in 38 ms on localhost (executor driver) (1/1)
20:33:53.024 INFO  TaskSchedulerImpl - Removed TaskSet 252.0, whose tasks have all completed, from pool 
20:33:53.024 INFO  DAGScheduler - ResultStage 252 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.051 s
20:33:53.025 INFO  DAGScheduler - Job 190 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:53.025 INFO  TaskSchedulerImpl - Killing all running tasks in stage 252: Stage finished
20:33:53.025 INFO  DAGScheduler - Job 190 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.052553 s
20:33:53.030 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:53.030 INFO  DAGScheduler - Got job 191 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:53.030 INFO  DAGScheduler - Final stage: ResultStage 253 (count at ReadsSparkSinkUnitTest.java:185)
20:33:53.030 INFO  DAGScheduler - Parents of final stage: List()
20:33:53.030 INFO  DAGScheduler - Missing parents: List()
20:33:53.030 INFO  DAGScheduler - Submitting ResultStage 253 (MapPartitionsRDD[1197] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:53.042 INFO  MemoryStore - Block broadcast_513 stored as values in memory (estimated size 286.8 KiB, free 1917.7 MiB)
20:33:53.043 INFO  MemoryStore - Block broadcast_513_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.6 MiB)
20:33:53.043 INFO  BlockManagerInfo - Added broadcast_513_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.5 MiB)
20:33:53.043 INFO  SparkContext - Created broadcast 513 from broadcast at DAGScheduler.scala:1580
20:33:53.043 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 253 (MapPartitionsRDD[1197] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:53.043 INFO  TaskSchedulerImpl - Adding task set 253.0 with 1 tasks resource profile 0
20:33:53.044 INFO  TaskSetManager - Starting task 0.0 in stage 253.0 (TID 309) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7880 bytes) 
20:33:53.044 INFO  Executor - Running task 0.0 in stage 253.0 (TID 309)
20:33:53.065 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram:0+50619
20:33:53.071 INFO  Executor - Finished task 0.0 in stage 253.0 (TID 309). 989 bytes result sent to driver
20:33:53.071 INFO  TaskSetManager - Finished task 0.0 in stage 253.0 (TID 309) in 27 ms on localhost (executor driver) (1/1)
20:33:53.071 INFO  TaskSchedulerImpl - Removed TaskSet 253.0, whose tasks have all completed, from pool 
20:33:53.071 INFO  DAGScheduler - ResultStage 253 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.040 s
20:33:53.071 INFO  DAGScheduler - Job 191 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:53.072 INFO  TaskSchedulerImpl - Killing all running tasks in stage 253: Stage finished
20:33:53.072 INFO  DAGScheduler - Job 191 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.041614 s
20:33:53.075 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:53.075 INFO  DAGScheduler - Got job 192 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:53.075 INFO  DAGScheduler - Final stage: ResultStage 254 (count at ReadsSparkSinkUnitTest.java:185)
20:33:53.075 INFO  DAGScheduler - Parents of final stage: List()
20:33:53.075 INFO  DAGScheduler - Missing parents: List()
20:33:53.075 INFO  DAGScheduler - Submitting ResultStage 254 (MapPartitionsRDD[1214] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:53.087 INFO  MemoryStore - Block broadcast_514 stored as values in memory (estimated size 286.8 KiB, free 1917.3 MiB)
20:33:53.091 INFO  BlockManagerInfo - Removed broadcast_504_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.5 MiB)
20:33:53.091 INFO  BlockManagerInfo - Removed broadcast_512_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.6 MiB)
20:33:53.091 INFO  MemoryStore - Block broadcast_514_piece0 stored as bytes in memory (estimated size 103.6 KiB, free 1917.6 MiB)
20:33:53.091 INFO  BlockManagerInfo - Added broadcast_514_piece0 in memory on localhost:45281 (size: 103.6 KiB, free: 1919.5 MiB)
20:33:53.092 INFO  BlockManagerInfo - Removed broadcast_509_piece0 on localhost:45281 in memory (size: 58.1 KiB, free: 1919.5 MiB)
20:33:53.092 INFO  SparkContext - Created broadcast 514 from broadcast at DAGScheduler.scala:1580
20:33:53.092 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 254 (MapPartitionsRDD[1214] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:53.092 INFO  BlockManagerInfo - Removed broadcast_507_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.5 MiB)
20:33:53.092 INFO  TaskSchedulerImpl - Adding task set 254.0 with 1 tasks resource profile 0
20:33:53.092 INFO  BlockManagerInfo - Removed broadcast_506_piece0 on localhost:45281 in memory (size: 1473.0 B, free: 1919.5 MiB)
20:33:53.093 INFO  TaskSetManager - Starting task 0.0 in stage 254.0 (TID 310) (localhost, executor driver, partition 0, ANY, 7853 bytes) 
20:33:53.093 INFO  BlockManagerInfo - Removed broadcast_505_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:53.093 INFO  Executor - Running task 0.0 in stage 254.0 (TID 310)
20:33:53.093 INFO  BlockManagerInfo - Removed broadcast_508_piece0 on localhost:45281 in memory (size: 107.3 KiB, free: 1919.7 MiB)
20:33:53.093 INFO  BlockManagerInfo - Removed broadcast_513_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.8 MiB)
20:33:53.114 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram:0+43713
20:33:53.114 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:53.115 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram	dst=null	perm=null	proto=rpc
20:33:53.116 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.cram.crai	dst=null	perm=null	proto=rpc
20:33:53.116 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest5_1e5fa13c-a2b6-4004-8bac-e571457d8faf.crai	dst=null	perm=null	proto=rpc
20:33:53.117 WARN  DFSUtil - Unexpected value for data transfer bytes=1024 duration=0
20:33:53.118 WARN  DFSUtil - Unexpected value for data transfer bytes=42995 duration=0
20:33:53.118 WARN  DFSUtil - Unexpected value for data transfer bytes=42 duration=0
20:33:53.126 INFO  Executor - Finished task 0.0 in stage 254.0 (TID 310). 989 bytes result sent to driver
20:33:53.126 INFO  TaskSetManager - Finished task 0.0 in stage 254.0 (TID 310) in 34 ms on localhost (executor driver) (1/1)
20:33:53.126 INFO  TaskSchedulerImpl - Removed TaskSet 254.0, whose tasks have all completed, from pool 
20:33:53.126 INFO  DAGScheduler - ResultStage 254 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.051 s
20:33:53.126 INFO  DAGScheduler - Job 192 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:53.126 INFO  TaskSchedulerImpl - Killing all running tasks in stage 254: Stage finished
20:33:53.127 INFO  DAGScheduler - Job 192 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.051684 s
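For this CRAM case the read-back goes through CramSource, and the audit log probes for `.cram.crai` and `.crai` indexes that were never written. Reading a CRAM also requires the reference it was compressed against, which is why this parameter set passes `human_g1k_v37.chr17_1Mb.fasta`. A hedged htsjdk sketch of that read-back (file locations are hypothetical local paths):

```java
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

import java.io.File;

public class CramReadBackSketch {
    public static void main(final String[] args) throws Exception {
        final File cram = new File("example.cram");                          // hypothetical
        final File reference = new File("human_g1k_v37.chr17_1Mb.fasta");    // reference used by the test case
        try (final SamReader reader = SamReaderFactory.makeDefault()
                .referenceSequence(reference)   // CRAM decoding needs the reference sequence
                .open(cram)) {
            long n = 0;
            for (final SAMRecord rec : reader) {
                n++;
            }
            System.out.println("records: " + n);
        }
    }
}
```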
20:33:53.140 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:53.141 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:53.141 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:53.142 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:53.144 INFO  MemoryStore - Block broadcast_515 stored as values in memory (estimated size 297.9 KiB, free 1918.6 MiB)
20:33:53.150 INFO  MemoryStore - Block broadcast_515_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.6 MiB)
20:33:53.150 INFO  BlockManagerInfo - Added broadcast_515_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:53.151 INFO  SparkContext - Created broadcast 515 from newAPIHadoopFile at PathSplitSource.java:96
20:33:53.172 INFO  MemoryStore - Block broadcast_516 stored as values in memory (estimated size 297.9 KiB, free 1918.3 MiB)
20:33:53.178 INFO  MemoryStore - Block broadcast_516_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.3 MiB)
20:33:53.179 INFO  BlockManagerInfo - Added broadcast_516_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.7 MiB)
20:33:53.179 INFO  SparkContext - Created broadcast 516 from newAPIHadoopFile at PathSplitSource.java:96
20:33:53.199 INFO  FileInputFormat - Total input files to process : 1
20:33:53.201 INFO  MemoryStore - Block broadcast_517 stored as values in memory (estimated size 160.7 KiB, free 1918.1 MiB)
20:33:53.202 INFO  MemoryStore - Block broadcast_517_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:53.202 INFO  BlockManagerInfo - Added broadcast_517_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.7 MiB)
20:33:53.202 INFO  SparkContext - Created broadcast 517 from broadcast at ReadsSparkSink.java:133
20:33:53.206 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts	dst=null	perm=null	proto=rpc
20:33:53.206 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:53.206 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:53.206 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:53.207 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=mkdirs	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0	dst=null	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:53.213 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:53.214 INFO  DAGScheduler - Registering RDD 1228 (mapToPair at SparkUtils.java:161) as input to shuffle 51
20:33:53.214 INFO  DAGScheduler - Got job 193 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:53.214 INFO  DAGScheduler - Final stage: ResultStage 256 (runJob at SparkHadoopWriter.scala:83)
20:33:53.214 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 255)
20:33:53.214 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 255)
20:33:53.214 INFO  DAGScheduler - Submitting ShuffleMapStage 255 (MapPartitionsRDD[1228] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:53.233 INFO  MemoryStore - Block broadcast_518 stored as values in memory (estimated size 520.4 KiB, free 1917.6 MiB)
20:33:53.234 INFO  MemoryStore - Block broadcast_518_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.4 MiB)
20:33:53.234 INFO  BlockManagerInfo - Added broadcast_518_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.5 MiB)
20:33:53.234 INFO  SparkContext - Created broadcast 518 from broadcast at DAGScheduler.scala:1580
20:33:53.234 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 255 (MapPartitionsRDD[1228] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:53.234 INFO  TaskSchedulerImpl - Adding task set 255.0 with 1 tasks resource profile 0
20:33:53.235 INFO  TaskSetManager - Starting task 0.0 in stage 255.0 (TID 311) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:53.235 INFO  Executor - Running task 0.0 in stage 255.0 (TID 311)
20:33:53.265 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:53.283 INFO  Executor - Finished task 0.0 in stage 255.0 (TID 311). 1148 bytes result sent to driver
20:33:53.284 INFO  TaskSetManager - Finished task 0.0 in stage 255.0 (TID 311) in 48 ms on localhost (executor driver) (1/1)
20:33:53.284 INFO  TaskSchedulerImpl - Removed TaskSet 255.0, whose tasks have all completed, from pool 
20:33:53.284 INFO  DAGScheduler - ShuffleMapStage 255 (mapToPair at SparkUtils.java:161) finished in 0.070 s
20:33:53.284 INFO  DAGScheduler - looking for newly runnable stages
20:33:53.284 INFO  DAGScheduler - running: HashSet()
20:33:53.284 INFO  DAGScheduler - waiting: HashSet(ResultStage 256)
20:33:53.284 INFO  DAGScheduler - failed: HashSet()
20:33:53.284 INFO  DAGScheduler - Submitting ResultStage 256 (MapPartitionsRDD[1234] at saveAsTextFile at SamSink.java:65), which has no missing parents
20:33:53.291 INFO  MemoryStore - Block broadcast_519 stored as values in memory (estimated size 241.1 KiB, free 1917.2 MiB)
20:33:53.291 INFO  MemoryStore - Block broadcast_519_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1917.1 MiB)
20:33:53.292 INFO  BlockManagerInfo - Added broadcast_519_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.5 MiB)
20:33:53.292 INFO  SparkContext - Created broadcast 519 from broadcast at DAGScheduler.scala:1580
20:33:53.292 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 256 (MapPartitionsRDD[1234] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
20:33:53.292 INFO  TaskSchedulerImpl - Adding task set 256.0 with 1 tasks resource profile 0
20:33:53.292 INFO  TaskSetManager - Starting task 0.0 in stage 256.0 (TID 312) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:53.293 INFO  Executor - Running task 0.0 in stage 256.0 (TID 312)
20:33:53.297 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:53.297 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:53.308 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
20:33:53.308 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:53.308 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:53.309 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0/part-00000	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:53.310 INFO  StateChange - BLOCK* allocate blk_1073741915_1091, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0/part-00000
20:33:53.311 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741915_1091 src: /127.0.0.1:54100 dest: /127.0.0.1:35765
20:33:53.315 INFO  clienttrace - src: /127.0.0.1:54100, dest: /127.0.0.1:35765, bytes: 761729, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741915_1091, duration(ns): 3728275
20:33:53.316 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741915_1091, type=LAST_IN_PIPELINE terminating
20:33:53.316 INFO  FSNamesystem - BLOCK* blk_1073741915_1091 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0/part-00000
20:33:53.717 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0/part-00000 is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:53.717 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0	dst=null	perm=null	proto=rpc
20:33:53.718 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0	dst=null	perm=null	proto=rpc
20:33:53.718 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/task_202507152033535114627543367634740_1234_m_000000	dst=null	perm=null	proto=rpc
20:33:53.719 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/_temporary/attempt_202507152033535114627543367634740_1234_m_000000_0	dst=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/task_202507152033535114627543367634740_1234_m_000000	perm=runner:supergroup:rwxr-xr-x	proto=rpc
20:33:53.719 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033535114627543367634740_1234_m_000000_0' to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/task_202507152033535114627543367634740_1234_m_000000
20:33:53.719 INFO  SparkHadoopMapRedUtil - attempt_202507152033535114627543367634740_1234_m_000000_0: Committed. Elapsed time: 1 ms.
20:33:53.720 INFO  Executor - Finished task 0.0 in stage 256.0 (TID 312). 1858 bytes result sent to driver
20:33:53.720 INFO  TaskSetManager - Finished task 0.0 in stage 256.0 (TID 312) in 428 ms on localhost (executor driver) (1/1)
20:33:53.720 INFO  TaskSchedulerImpl - Removed TaskSet 256.0, whose tasks have all completed, from pool 
20:33:53.720 INFO  DAGScheduler - ResultStage 256 (runJob at SparkHadoopWriter.scala:83) finished in 0.436 s
20:33:53.720 INFO  DAGScheduler - Job 193 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:53.720 INFO  TaskSchedulerImpl - Killing all running tasks in stage 256: Stage finished
20:33:53.720 INFO  DAGScheduler - Job 193 finished: runJob at SparkHadoopWriter.scala:83, took 0.507124 s
20:33:53.721 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033535114627543367634740_1234.
20:33:53.721 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0	dst=null	perm=null	proto=rpc
20:33:53.722 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts	dst=null	perm=null	proto=rpc
20:33:53.722 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/task_202507152033535114627543367634740_1234_m_000000	dst=null	perm=null	proto=rpc
20:33:53.722 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/part-00000	dst=null	perm=null	proto=rpc
20:33:53.723 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary/0/task_202507152033535114627543367634740_1234_m_000000/part-00000	dst=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/part-00000	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:53.724 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_temporary	dst=null	perm=null	proto=rpc
20:33:53.724 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_SUCCESS	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:53.725 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/_SUCCESS is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:53.725 INFO  audit - allowed=false	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/.spark-staging-1234	dst=null	perm=null	proto=rpc
20:33:53.726 INFO  SparkHadoopWriter - Write Job job_202507152033535114627543367634740_1234 committed. Elapsed time: 4 ms.
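The stage name "saveAsTextFile at SamSink.java:65" above suggests that for `.sam` output the record body is written as plain text lines, with the header written separately and concatenated in front afterwards. A hedged illustration of that pattern with a local Spark context and made-up record lines (not GATK's SamSink):

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;

public class SamTextSinkSketch {
    public static void main(final String[] args) {
        final SparkConf conf = new SparkConf().setMaster("local[1]").setAppName("sam-text-sink-sketch");
        try (final JavaSparkContext ctx = new JavaSparkContext(conf)) {
            // Pretend these are tab-delimited SAM record lines produced upstream.
            final JavaRDD<String> samLines = ctx.parallelize(Arrays.asList(
                    "read1\t0\tchr17\t100\t60\t10M\t*\t0\t0\tACGTACGTAC\tFFFFFFFFFF",
                    "read2\t0\tchr17\t200\t60\t10M\t*\t0\t0\tACGTACGTAC\tFFFFFFFFFF"));
            // The body is written as text parts; a header file would be concatenated in front later.
            samLines.saveAsTextFile("example.sam.parts");   // hypothetical output directory
        }
    }
}
```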
20:33:53.726 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/header	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:53.727 INFO  StateChange - BLOCK* allocate blk_1073741916_1092, replicas=127.0.0.1:35765 for /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/header
20:33:53.728 INFO  DataNode - Receiving BP-169878775-10.1.0.111-1752611591188:blk_1073741916_1092 src: /127.0.0.1:54112 dest: /127.0.0.1:35765
20:33:53.729 INFO  clienttrace - src: /127.0.0.1:54112, dest: /127.0.0.1:35765, bytes: 85829, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_240628966_1, offset: 0, srvID: 5d06df6f-6d54-439e-a2bc-24e6ef6e242e, blockid: BP-169878775-10.1.0.111-1752611591188:blk_1073741916_1092, duration(ns): 585853
20:33:53.729 INFO  DataNode - PacketResponder: BP-169878775-10.1.0.111-1752611591188:blk_1073741916_1092, type=LAST_IN_PIPELINE terminating
20:33:53.729 INFO  FSNamesystem - BLOCK* blk_1073741916_1092 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/header
20:33:54.130 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/header is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:54.131 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=listStatus	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts	dst=null	perm=null	proto=rpc
20:33:54.132 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=create	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/output	dst=null	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:54.132 INFO  StateChange - DIR* completeFile: /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/output is closed by DFSClient_NONMAPREDUCE_240628966_1
20:33:54.133 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam
20:33:54.133 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=concat	src=[/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/header, /user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/part-00000]	dst=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/output	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:54.133 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.134 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.134 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=rename	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts/output	dst=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	perm=runner:supergroup:rw-r--r--	proto=rpc
20:33:54.134 INFO  HadoopFileSystemWrapper - Concatenating to hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam done
20:33:54.135 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=delete	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam.parts	dst=null	perm=null	proto=rpc
20:33:54.135 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.135 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.136 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.136 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
WARNING	2025-07-15 20:33:54	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:54.138 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:54.139 WARN  DFSUtil - Unexpected value for data transfer bytes=767681 duration=0
20:33:54.139 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.139 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.139 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
WARNING	2025-07-15 20:33:54	SamReaderFactory	Unable to detect file format from input URL or stream, assuming SAM format.
20:33:54.141 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:54.142 WARN  DFSUtil - Unexpected value for data transfer bytes=767681 duration=0
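The SamReaderFactory warnings above are expected when the .sam output is read back over an HDFS stream: with no filename visible to the reader, htsjdk cannot sniff the format and falls back to assuming SAM. A hedged htsjdk sketch of opening a reader over a raw stream (the stream source is hypothetical, e.g. FileSystem.open on HDFS):

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SamInputResource;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;
    import java.io.IOException;
    import java.io.InputStream;

    public class StreamSamReadSketch {
        static long countRecords(InputStream in) {
            SamReaderFactory factory = SamReaderFactory.makeDefault()
                    .validationStringency(ValidationStringency.SILENT);
            // The format cannot be detected from a bare stream, so htsjdk assumes SAM
            // and logs the warning seen above.
            try (SamReader reader = factory.open(SamInputResource.of(in))) {
                long n = 0;
                for (SAMRecord rec : reader) {
                    n++;
                }
                return n;
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }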
20:33:54.142 INFO  MemoryStore - Block broadcast_520 stored as values in memory (estimated size 160.7 KiB, free 1917.0 MiB)
20:33:54.143 INFO  MemoryStore - Block broadcast_520_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1917.0 MiB)
20:33:54.143 INFO  BlockManagerInfo - Added broadcast_520_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.5 MiB)
20:33:54.143 INFO  SparkContext - Created broadcast 520 from broadcast at SamSource.java:78
20:33:54.144 INFO  MemoryStore - Block broadcast_521 stored as values in memory (estimated size 297.9 KiB, free 1916.7 MiB)
20:33:54.150 INFO  MemoryStore - Block broadcast_521_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.6 MiB)
20:33:54.151 INFO  BlockManagerInfo - Added broadcast_521_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.4 MiB)
20:33:54.151 INFO  SparkContext - Created broadcast 521 from newAPIHadoopFile at SamSource.java:108
20:33:54.153 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=getfileinfo	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.153 INFO  FileInputFormat - Total input files to process : 1
20:33:54.154 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.158 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:54.158 INFO  DAGScheduler - Got job 194 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:54.158 INFO  DAGScheduler - Final stage: ResultStage 257 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:54.158 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.158 INFO  DAGScheduler - Missing parents: List()
20:33:54.158 INFO  DAGScheduler - Submitting ResultStage 257 (MapPartitionsRDD[1239] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.159 INFO  MemoryStore - Block broadcast_522 stored as values in memory (estimated size 7.5 KiB, free 1916.6 MiB)
20:33:54.159 INFO  MemoryStore - Block broadcast_522_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1916.6 MiB)
20:33:54.159 INFO  BlockManagerInfo - Added broadcast_522_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.4 MiB)
20:33:54.159 INFO  SparkContext - Created broadcast 522 from broadcast at DAGScheduler.scala:1580
20:33:54.159 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 257 (MapPartitionsRDD[1239] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.159 INFO  TaskSchedulerImpl - Adding task set 257.0 with 1 tasks resource profile 0
20:33:54.160 INFO  TaskSetManager - Starting task 0.0 in stage 257.0 (TID 313) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:54.160 INFO  Executor - Running task 0.0 in stage 257.0 (TID 313)
20:33:54.161 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam:0+847558
20:33:54.163 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.164 WARN  DFSUtil - Unexpected value for data transfer bytes=86501 duration=0
20:33:54.174 INFO  Executor - Finished task 0.0 in stage 257.0 (TID 313). 651483 bytes result sent to driver
20:33:54.176 INFO  TaskSetManager - Finished task 0.0 in stage 257.0 (TID 313) in 16 ms on localhost (executor driver) (1/1)
20:33:54.176 INFO  TaskSchedulerImpl - Removed TaskSet 257.0, whose tasks have all completed, from pool 
20:33:54.176 INFO  DAGScheduler - ResultStage 257 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.018 s
20:33:54.176 INFO  DAGScheduler - Job 194 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.176 INFO  TaskSchedulerImpl - Killing all running tasks in stage 257: Stage finished
20:33:54.176 INFO  DAGScheduler - Job 194 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.018300 s
20:33:54.191 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:54.191 INFO  DAGScheduler - Got job 195 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:54.191 INFO  DAGScheduler - Final stage: ResultStage 258 (count at ReadsSparkSinkUnitTest.java:185)
20:33:54.191 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.191 INFO  DAGScheduler - Missing parents: List()
20:33:54.191 INFO  DAGScheduler - Submitting ResultStage 258 (MapPartitionsRDD[1221] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.211 INFO  MemoryStore - Block broadcast_523 stored as values in memory (estimated size 426.1 KiB, free 1916.2 MiB)
20:33:54.212 INFO  MemoryStore - Block broadcast_523_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.0 MiB)
20:33:54.212 INFO  BlockManagerInfo - Added broadcast_523_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.3 MiB)
20:33:54.212 INFO  SparkContext - Created broadcast 523 from broadcast at DAGScheduler.scala:1580
20:33:54.212 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 258 (MapPartitionsRDD[1221] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.212 INFO  TaskSchedulerImpl - Adding task set 258.0 with 1 tasks resource profile 0
20:33:54.213 INFO  TaskSetManager - Starting task 0.0 in stage 258.0 (TID 314) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:54.213 INFO  Executor - Running task 0.0 in stage 258.0 (TID 314)
20:33:54.243 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:54.252 INFO  Executor - Finished task 0.0 in stage 258.0 (TID 314). 989 bytes result sent to driver
20:33:54.252 INFO  TaskSetManager - Finished task 0.0 in stage 258.0 (TID 314) in 39 ms on localhost (executor driver) (1/1)
20:33:54.253 INFO  TaskSchedulerImpl - Removed TaskSet 258.0, whose tasks have all completed, from pool 
20:33:54.253 INFO  DAGScheduler - ResultStage 258 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.062 s
20:33:54.253 INFO  DAGScheduler - Job 195 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.253 INFO  TaskSchedulerImpl - Killing all running tasks in stage 258: Stage finished
20:33:54.253 INFO  DAGScheduler - Job 195 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.062177 s
20:33:54.256 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:54.256 INFO  DAGScheduler - Got job 196 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:54.256 INFO  DAGScheduler - Final stage: ResultStage 259 (count at ReadsSparkSinkUnitTest.java:185)
20:33:54.256 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.256 INFO  DAGScheduler - Missing parents: List()
20:33:54.256 INFO  DAGScheduler - Submitting ResultStage 259 (MapPartitionsRDD[1239] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.257 INFO  MemoryStore - Block broadcast_524 stored as values in memory (estimated size 7.4 KiB, free 1916.0 MiB)
20:33:54.262 INFO  MemoryStore - Block broadcast_524_piece0 stored as bytes in memory (estimated size 3.8 KiB, free 1916.0 MiB)
20:33:54.262 INFO  BlockManagerInfo - Added broadcast_524_piece0 in memory on localhost:45281 (size: 3.8 KiB, free: 1919.2 MiB)
20:33:54.262 INFO  BlockManagerInfo - Removed broadcast_517_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:54.262 INFO  SparkContext - Created broadcast 524 from broadcast at DAGScheduler.scala:1580
20:33:54.263 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 259 (MapPartitionsRDD[1239] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.263 INFO  TaskSchedulerImpl - Adding task set 259.0 with 1 tasks resource profile 0
20:33:54.263 INFO  BlockManagerInfo - Removed broadcast_502_piece0 on localhost:45281 in memory (size: 228.0 B, free: 1919.3 MiB)
20:33:54.263 INFO  TaskSetManager - Starting task 0.0 in stage 259.0 (TID 315) (localhost, executor driver, partition 0, ANY, 7852 bytes) 
20:33:54.264 INFO  Executor - Running task 0.0 in stage 259.0 (TID 315)
20:33:54.264 INFO  BlockManagerInfo - Removed broadcast_522_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.3 MiB)
20:33:54.264 INFO  BlockManagerInfo - Removed broadcast_519_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.3 MiB)
20:33:54.265 INFO  BlockManagerInfo - Removed broadcast_518_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.5 MiB)
20:33:54.265 INFO  BlockManagerInfo - Removed broadcast_511_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:54.265 INFO  NewHadoopRDD - Input split: hdfs://localhost:44977/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam:0+847558
20:33:54.266 INFO  BlockManagerInfo - Removed broadcast_503_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:54.266 INFO  BlockManagerInfo - Removed broadcast_510_piece0 on localhost:45281 in memory (size: 187.0 B, free: 1919.6 MiB)
20:33:54.266 INFO  BlockManagerInfo - Removed broadcast_516_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.6 MiB)
20:33:54.267 INFO  BlockManagerInfo - Removed broadcast_514_piece0 on localhost:45281 in memory (size: 103.6 KiB, free: 1919.7 MiB)
20:33:54.267 INFO  audit - allowed=true	ugi=runner (auth:SIMPLE)	ip=/127.0.0.1	cmd=open	src=/user/runner/ReadsSparkSinkUnitTest6_96fdb7c3-c2d0-4100-afcc-009e184a054d.sam	dst=null	perm=null	proto=rpc
20:33:54.268 INFO  BlockManagerInfo - Removed broadcast_523_piece0 on localhost:45281 in memory (size: 153.6 KiB, free: 1919.9 MiB)
20:33:54.274 INFO  Executor - Finished task 0.0 in stage 259.0 (TID 315). 989 bytes result sent to driver
20:33:54.275 INFO  TaskSetManager - Finished task 0.0 in stage 259.0 (TID 315) in 12 ms on localhost (executor driver) (1/1)
20:33:54.275 INFO  TaskSchedulerImpl - Removed TaskSet 259.0, whose tasks have all completed, from pool 
20:33:54.275 INFO  DAGScheduler - ResultStage 259 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.018 s
20:33:54.275 INFO  DAGScheduler - Job 196 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.275 INFO  TaskSchedulerImpl - Killing all running tasks in stage 259: Stage finished
20:33:54.275 INFO  DAGScheduler - Job 196 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.018933 s
20:33:54.279 INFO  MemoryStore - Block broadcast_525 stored as values in memory (estimated size 297.9 KiB, free 1918.9 MiB)
20:33:54.286 INFO  MemoryStore - Block broadcast_525_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.8 MiB)
20:33:54.286 INFO  BlockManagerInfo - Added broadcast_525_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:54.286 INFO  SparkContext - Created broadcast 525 from newAPIHadoopFile at PathSplitSource.java:96
20:33:54.308 INFO  MemoryStore - Block broadcast_526 stored as values in memory (estimated size 297.9 KiB, free 1918.5 MiB)
20:33:54.314 INFO  MemoryStore - Block broadcast_526_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1918.5 MiB)
20:33:54.314 INFO  BlockManagerInfo - Added broadcast_526_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.8 MiB)
20:33:54.314 INFO  SparkContext - Created broadcast 526 from newAPIHadoopFile at PathSplitSource.java:96
20:33:54.334 INFO  FileInputFormat - Total input files to process : 1
20:33:54.336 INFO  MemoryStore - Block broadcast_527 stored as values in memory (estimated size 160.7 KiB, free 1918.3 MiB)
20:33:54.337 INFO  MemoryStore - Block broadcast_527_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.3 MiB)
20:33:54.337 INFO  BlockManagerInfo - Added broadcast_527_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.8 MiB)
20:33:54.337 INFO  SparkContext - Created broadcast 527 from broadcast at ReadsSparkSink.java:133
20:33:54.339 INFO  MemoryStore - Block broadcast_528 stored as values in memory (estimated size 163.2 KiB, free 1918.1 MiB)
20:33:54.339 INFO  MemoryStore - Block broadcast_528_piece0 stored as bytes in memory (estimated size 9.6 KiB, free 1918.1 MiB)
20:33:54.339 INFO  BlockManagerInfo - Added broadcast_528_piece0 in memory on localhost:45281 (size: 9.6 KiB, free: 1919.8 MiB)
20:33:54.340 INFO  SparkContext - Created broadcast 528 from broadcast at BamSink.java:76
20:33:54.341 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:54.341 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:54.341 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
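The committer lines above show Hadoop falling back to the default FileOutputCommitter with algorithm version 1, which stages task output under a _temporary directory and commits it by rename. If a different commit algorithm were wanted, it would normally be selected through the Hadoop configuration; a minimal sketch using the documented property name:

    import org.apache.hadoop.conf.Configuration;

    public class CommitterConfigSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // Version 2 writes task output directly into the destination directory,
            // trading the rename-based commit of version 1 for fewer renames.
            conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 2);
            System.out.println(conf.get("mapreduce.fileoutputcommitter.algorithm.version"));
        }
    }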
20:33:54.358 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
20:33:54.358 INFO  DAGScheduler - Registering RDD 1253 (mapToPair at SparkUtils.java:161) as input to shuffle 52
20:33:54.359 INFO  DAGScheduler - Got job 197 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
20:33:54.359 INFO  DAGScheduler - Final stage: ResultStage 261 (runJob at SparkHadoopWriter.scala:83)
20:33:54.359 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 260)
20:33:54.359 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 260)
20:33:54.359 INFO  DAGScheduler - Submitting ShuffleMapStage 260 (MapPartitionsRDD[1253] at mapToPair at SparkUtils.java:161), which has no missing parents
20:33:54.376 INFO  MemoryStore - Block broadcast_529 stored as values in memory (estimated size 520.4 KiB, free 1917.6 MiB)
20:33:54.378 INFO  MemoryStore - Block broadcast_529_piece0 stored as bytes in memory (estimated size 166.1 KiB, free 1917.5 MiB)
20:33:54.378 INFO  BlockManagerInfo - Added broadcast_529_piece0 in memory on localhost:45281 (size: 166.1 KiB, free: 1919.6 MiB)
20:33:54.378 INFO  SparkContext - Created broadcast 529 from broadcast at DAGScheduler.scala:1580
20:33:54.378 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 260 (MapPartitionsRDD[1253] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
20:33:54.378 INFO  TaskSchedulerImpl - Adding task set 260.0 with 1 tasks resource profile 0
20:33:54.379 INFO  TaskSetManager - Starting task 0.0 in stage 260.0 (TID 316) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7861 bytes) 
20:33:54.379 INFO  Executor - Running task 0.0 in stage 260.0 (TID 316)
20:33:54.402 INFO  FsDatasetAsyncDiskService - Scheduling blk_1073741907_1083 replica FinalizedReplica (FINALIZED, numBytes=592, bytesOnDisk=592, visibleLength=592, volume=/tmp/minicluster_storage11240959748026123074/data/data1, blockURI=file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741907) for deletion
20:33:54.402 INFO  FsDatasetAsyncDiskService - Deleted BP-169878775-10.1.0.111-1752611591188 blk_1073741907_1083 URI file:/tmp/minicluster_storage11240959748026123074/data/data1/current/BP-169878775-10.1.0.111-1752611591188/current/finalized/subdir0/subdir0/blk_1073741907
20:33:54.410 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:54.425 INFO  Executor - Finished task 0.0 in stage 260.0 (TID 316). 1148 bytes result sent to driver
20:33:54.425 INFO  TaskSetManager - Finished task 0.0 in stage 260.0 (TID 316) in 46 ms on localhost (executor driver) (1/1)
20:33:54.425 INFO  TaskSchedulerImpl - Removed TaskSet 260.0, whose tasks have all completed, from pool 
20:33:54.425 INFO  DAGScheduler - ShuffleMapStage 260 (mapToPair at SparkUtils.java:161) finished in 0.066 s
20:33:54.425 INFO  DAGScheduler - looking for newly runnable stages
20:33:54.425 INFO  DAGScheduler - running: HashSet()
20:33:54.425 INFO  DAGScheduler - waiting: HashSet(ResultStage 261)
20:33:54.425 INFO  DAGScheduler - failed: HashSet()
20:33:54.425 INFO  DAGScheduler - Submitting ResultStage 261 (MapPartitionsRDD[1258] at mapToPair at BamSink.java:91), which has no missing parents
20:33:54.433 INFO  MemoryStore - Block broadcast_530 stored as values in memory (estimated size 241.4 KiB, free 1917.2 MiB)
20:33:54.433 INFO  MemoryStore - Block broadcast_530_piece0 stored as bytes in memory (estimated size 67.0 KiB, free 1917.2 MiB)
20:33:54.433 INFO  BlockManagerInfo - Added broadcast_530_piece0 in memory on localhost:45281 (size: 67.0 KiB, free: 1919.5 MiB)
20:33:54.434 INFO  SparkContext - Created broadcast 530 from broadcast at DAGScheduler.scala:1580
20:33:54.434 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 261 (MapPartitionsRDD[1258] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
20:33:54.434 INFO  TaskSchedulerImpl - Adding task set 261.0 with 1 tasks resource profile 0
20:33:54.434 INFO  TaskSetManager - Starting task 0.0 in stage 261.0 (TID 317) (localhost, executor driver, partition 0, NODE_LOCAL, 7513 bytes) 
20:33:54.434 INFO  Executor - Running task 0.0 in stage 261.0 (TID 317)
20:33:54.438 INFO  ShuffleBlockFetcherIterator - Getting 1 (343.8 KiB) non-empty blocks including 1 (343.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
20:33:54.439 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
20:33:54.449 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:54.449 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:54.449 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:54.450 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
20:33:54.450 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
20:33:54.450 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
20:33:54.472 INFO  FileOutputCommitter - Saved output of task 'attempt_202507152033544058447364982349331_1258_r_000000_0' to file:/tmp/ReadsSparkSinkUnitTest114189241005654697824.bam.parts/_temporary/0/task_202507152033544058447364982349331_1258_r_000000
20:33:54.472 INFO  SparkHadoopMapRedUtil - attempt_202507152033544058447364982349331_1258_r_000000_0: Committed. Elapsed time: 0 ms.
20:33:54.473 INFO  Executor - Finished task 0.0 in stage 261.0 (TID 317). 1858 bytes result sent to driver
20:33:54.473 INFO  TaskSetManager - Finished task 0.0 in stage 261.0 (TID 317) in 39 ms on localhost (executor driver) (1/1)
20:33:54.473 INFO  TaskSchedulerImpl - Removed TaskSet 261.0, whose tasks have all completed, from pool 
20:33:54.473 INFO  DAGScheduler - ResultStage 261 (runJob at SparkHadoopWriter.scala:83) finished in 0.047 s
20:33:54.473 INFO  DAGScheduler - Job 197 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.474 INFO  TaskSchedulerImpl - Killing all running tasks in stage 261: Stage finished
20:33:54.474 INFO  DAGScheduler - Job 197 finished: runJob at SparkHadoopWriter.scala:83, took 0.115598 s
20:33:54.474 INFO  SparkHadoopWriter - Start to commit write Job job_202507152033544058447364982349331_1258.
20:33:54.478 INFO  SparkHadoopWriter - Write Job job_202507152033544058447364982349331_1258 committed. Elapsed time: 4 ms.
20:33:54.489 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam
20:33:54.493 INFO  HadoopFileSystemWrapper - Concatenating to file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam done
20:33:54.493 INFO  IndexFileMerger - Merging .sbi files in temp directory file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam.parts/ to file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam.sbi
20:33:54.498 INFO  IndexFileMerger - Done merging .sbi files
20:33:54.498 INFO  IndexFileMerger - Merging .bai files in temp directory file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam.parts/ to file:////tmp/ReadsSparkSinkUnitTest114189241005654697824.bam.bai
20:33:54.502 INFO  IndexFileMerger - Done merging .bai files
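At this point the sharded .bam has been concatenated into a single file and its .sbi and .bai shards merged alongside it, so the final output should consist of three sibling files. A trivial sketch of the kind of sanity check one could run on the local output (paths hypothetical):

    import java.nio.file.Files;
    import java.nio.file.Path;

    public class OutputFilesSketch {
        public static void main(String[] args) {
            // Hypothetical local output, mirroring the layout produced above.
            Path bam = Path.of("/tmp/output.bam");
            for (String suffix : new String[] { "", ".sbi", ".bai" }) {
                Path p = Path.of(bam + suffix);
                System.out.printf("%s exists: %b%n", p, Files.exists(p));
            }
        }
    }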
20:33:54.504 INFO  MemoryStore - Block broadcast_531 stored as values in memory (estimated size 320.0 B, free 1917.2 MiB)
20:33:54.505 INFO  MemoryStore - Block broadcast_531_piece0 stored as bytes in memory (estimated size 233.0 B, free 1917.2 MiB)
20:33:54.505 INFO  BlockManagerInfo - Added broadcast_531_piece0 in memory on localhost:45281 (size: 233.0 B, free: 1919.5 MiB)
20:33:54.506 INFO  SparkContext - Created broadcast 531 from broadcast at BamSource.java:104
20:33:54.507 INFO  MemoryStore - Block broadcast_532 stored as values in memory (estimated size 297.9 KiB, free 1916.9 MiB)
20:33:54.514 INFO  MemoryStore - Block broadcast_532_piece0 stored as bytes in memory (estimated size 50.2 KiB, free 1916.8 MiB)
20:33:54.514 INFO  BlockManagerInfo - Added broadcast_532_piece0 in memory on localhost:45281 (size: 50.2 KiB, free: 1919.5 MiB)
20:33:54.514 INFO  SparkContext - Created broadcast 532 from newAPIHadoopFile at PathSplitSource.java:96
20:33:54.523 INFO  FileInputFormat - Total input files to process : 1
20:33:54.537 INFO  SparkContext - Starting job: collect at ReadsSparkSinkUnitTest.java:182
20:33:54.537 INFO  DAGScheduler - Got job 198 (collect at ReadsSparkSinkUnitTest.java:182) with 1 output partitions
20:33:54.537 INFO  DAGScheduler - Final stage: ResultStage 262 (collect at ReadsSparkSinkUnitTest.java:182)
20:33:54.537 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.537 INFO  DAGScheduler - Missing parents: List()
20:33:54.537 INFO  DAGScheduler - Submitting ResultStage 262 (MapPartitionsRDD[1264] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.543 INFO  MemoryStore - Block broadcast_533 stored as values in memory (estimated size 148.2 KiB, free 1916.7 MiB)
20:33:54.544 INFO  MemoryStore - Block broadcast_533_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1916.6 MiB)
20:33:54.544 INFO  BlockManagerInfo - Added broadcast_533_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.4 MiB)
20:33:54.544 INFO  SparkContext - Created broadcast 533 from broadcast at DAGScheduler.scala:1580
20:33:54.544 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 262 (MapPartitionsRDD[1264] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.544 INFO  TaskSchedulerImpl - Adding task set 262.0 with 1 tasks resource profile 0
20:33:54.545 INFO  TaskSetManager - Starting task 0.0 in stage 262.0 (TID 318) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:54.545 INFO  Executor - Running task 0.0 in stage 262.0 (TID 318)
20:33:54.557 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest114189241005654697824.bam:0+237038
20:33:54.563 INFO  Executor - Finished task 0.0 in stage 262.0 (TID 318). 651483 bytes result sent to driver
20:33:54.564 INFO  TaskSetManager - Finished task 0.0 in stage 262.0 (TID 318) in 19 ms on localhost (executor driver) (1/1)
20:33:54.564 INFO  TaskSchedulerImpl - Removed TaskSet 262.0, whose tasks have all completed, from pool 
20:33:54.564 INFO  DAGScheduler - ResultStage 262 (collect at ReadsSparkSinkUnitTest.java:182) finished in 0.026 s
20:33:54.564 INFO  DAGScheduler - Job 198 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.564 INFO  TaskSchedulerImpl - Killing all running tasks in stage 262: Stage finished
20:33:54.564 INFO  DAGScheduler - Job 198 finished: collect at ReadsSparkSinkUnitTest.java:182, took 0.027402 s
20:33:54.574 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:54.574 INFO  DAGScheduler - Got job 199 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:54.574 INFO  DAGScheduler - Final stage: ResultStage 263 (count at ReadsSparkSinkUnitTest.java:185)
20:33:54.574 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.574 INFO  DAGScheduler - Missing parents: List()
20:33:54.574 INFO  DAGScheduler - Submitting ResultStage 263 (MapPartitionsRDD[1246] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.591 INFO  MemoryStore - Block broadcast_534 stored as values in memory (estimated size 426.1 KiB, free 1916.2 MiB)
20:33:54.592 INFO  MemoryStore - Block broadcast_534_piece0 stored as bytes in memory (estimated size 153.6 KiB, free 1916.1 MiB)
20:33:54.593 INFO  BlockManagerInfo - Added broadcast_534_piece0 in memory on localhost:45281 (size: 153.6 KiB, free: 1919.3 MiB)
20:33:54.593 INFO  SparkContext - Created broadcast 534 from broadcast at DAGScheduler.scala:1580
20:33:54.593 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 263 (MapPartitionsRDD[1246] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.593 INFO  TaskSchedulerImpl - Adding task set 263.0 with 1 tasks resource profile 0
20:33:54.593 INFO  TaskSetManager - Starting task 0.0 in stage 263.0 (TID 319) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7872 bytes) 
20:33:54.594 INFO  Executor - Running task 0.0 in stage 263.0 (TID 319)
20:33:54.623 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam:0+222075
20:33:54.637 INFO  Executor - Finished task 0.0 in stage 263.0 (TID 319). 1075 bytes result sent to driver
20:33:54.637 INFO  BlockManagerInfo - Removed broadcast_520_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.3 MiB)
20:33:54.638 INFO  TaskSetManager - Finished task 0.0 in stage 263.0 (TID 319) in 45 ms on localhost (executor driver) (1/1)
20:33:54.638 INFO  BlockManagerInfo - Removed broadcast_526_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.4 MiB)
20:33:54.638 INFO  TaskSchedulerImpl - Removed TaskSet 263.0, whose tasks have all completed, from pool 
20:33:54.638 INFO  DAGScheduler - ResultStage 263 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.063 s
20:33:54.638 INFO  DAGScheduler - Job 199 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.638 INFO  TaskSchedulerImpl - Killing all running tasks in stage 263: Stage finished
20:33:54.638 INFO  DAGScheduler - Job 199 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.064090 s
20:33:54.639 INFO  BlockManagerInfo - Removed broadcast_530_piece0 on localhost:45281 in memory (size: 67.0 KiB, free: 1919.4 MiB)
20:33:54.639 INFO  BlockManagerInfo - Removed broadcast_515_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.5 MiB)
20:33:54.640 INFO  BlockManagerInfo - Removed broadcast_524_piece0 on localhost:45281 in memory (size: 3.8 KiB, free: 1919.5 MiB)
20:33:54.640 INFO  BlockManagerInfo - Removed broadcast_528_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.5 MiB)
20:33:54.641 INFO  BlockManagerInfo - Removed broadcast_529_piece0 on localhost:45281 in memory (size: 166.1 KiB, free: 1919.6 MiB)
20:33:54.641 INFO  BlockManagerInfo - Removed broadcast_533_piece0 on localhost:45281 in memory (size: 54.5 KiB, free: 1919.7 MiB)
20:33:54.641 INFO  BlockManagerInfo - Removed broadcast_527_piece0 on localhost:45281 in memory (size: 9.6 KiB, free: 1919.7 MiB)
20:33:54.642 INFO  BlockManagerInfo - Removed broadcast_521_piece0 on localhost:45281 in memory (size: 50.2 KiB, free: 1919.8 MiB)
20:33:54.642 INFO  SparkContext - Starting job: count at ReadsSparkSinkUnitTest.java:185
20:33:54.642 INFO  DAGScheduler - Got job 200 (count at ReadsSparkSinkUnitTest.java:185) with 1 output partitions
20:33:54.643 INFO  DAGScheduler - Final stage: ResultStage 264 (count at ReadsSparkSinkUnitTest.java:185)
20:33:54.643 INFO  DAGScheduler - Parents of final stage: List()
20:33:54.643 INFO  DAGScheduler - Missing parents: List()
20:33:54.643 INFO  DAGScheduler - Submitting ResultStage 264 (MapPartitionsRDD[1264] at filter at ReadsSparkSource.java:96), which has no missing parents
20:33:54.649 INFO  MemoryStore - Block broadcast_535 stored as values in memory (estimated size 148.1 KiB, free 1918.6 MiB)
20:33:54.650 INFO  MemoryStore - Block broadcast_535_piece0 stored as bytes in memory (estimated size 54.5 KiB, free 1918.6 MiB)
20:33:54.650 INFO  BlockManagerInfo - Added broadcast_535_piece0 in memory on localhost:45281 (size: 54.5 KiB, free: 1919.7 MiB)
20:33:54.650 INFO  SparkContext - Created broadcast 535 from broadcast at DAGScheduler.scala:1580
20:33:54.650 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 264 (MapPartitionsRDD[1264] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
20:33:54.650 INFO  TaskSchedulerImpl - Adding task set 264.0 with 1 tasks resource profile 0
20:33:54.650 INFO  TaskSetManager - Starting task 0.0 in stage 264.0 (TID 320) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7810 bytes) 
20:33:54.651 INFO  Executor - Running task 0.0 in stage 264.0 (TID 320)
20:33:54.662 INFO  NewHadoopRDD - Input split: file:/tmp/ReadsSparkSinkUnitTest114189241005654697824.bam:0+237038
20:33:54.666 INFO  Executor - Finished task 0.0 in stage 264.0 (TID 320). 989 bytes result sent to driver
20:33:54.666 INFO  TaskSetManager - Finished task 0.0 in stage 264.0 (TID 320) in 16 ms on localhost (executor driver) (1/1)
20:33:54.666 INFO  TaskSchedulerImpl - Removed TaskSet 264.0, whose tasks have all completed, from pool 
20:33:54.666 INFO  DAGScheduler - ResultStage 264 (count at ReadsSparkSinkUnitTest.java:185) finished in 0.023 s
20:33:54.666 INFO  DAGScheduler - Job 200 is finished. Cancelling potential speculative or zombie tasks for this job
20:33:54.666 INFO  TaskSchedulerImpl - Killing all running tasks in stage 264: Stage finished
20:33:54.666 INFO  DAGScheduler - Job 200 finished: count at ReadsSparkSinkUnitTest.java:185, took 0.024019 s
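The collect and count jobs at ReadsSparkSinkUnitTest.java:182 and :185 are the read-back verification: the reads written by the sink are reloaded and compared against the original input. A schematic of that round-trip check in plain Spark, where loadReads is a hypothetical stand-in for the test's ReadsSparkSource-based loading:

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class RoundTripCheckSketch {
        // Hypothetical loader; the real test parses SAM/BAM records rather than text lines.
        static JavaRDD<String> loadReads(JavaSparkContext ctx, String path) {
            return ctx.textFile(path);
        }

        static void verify(JavaSparkContext ctx, String originalPath, String writtenPath) {
            JavaRDD<String> original = loadReads(ctx, originalPath);
            JavaRDD<String> written  = loadReads(ctx, writtenPath);
            // Mirrors the collect at :182 (content comparison) and the counts at :185.
            long originalCount = original.count();
            long writtenCount  = written.count();
            if (originalCount != writtenCount) {
                throw new AssertionError(
                        "Read counts differ: " + originalCount + " vs " + writtenCount);
            }
        }
    }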