Skip to content

Commit

Permalink
[ADAM-823] referenceIndex's in SAM/ADAM seq records, Contigs
Browse files Browse the repository at this point in the history
fixes #823, fixes #822
  • Loading branch information
ryan-williams authored and fnothaft committed Sep 29, 2015
1 parent 7ee4d1b commit 283ea9d
Show file tree
Hide file tree
Showing 34 changed files with 270 additions and 101 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,14 @@ import org.bdgenomics.formats.avro.AlignmentRecord
class JavaADAMContextSuite extends ADAMFunSuite {

sparkTest("can read a small .SAM file") {
val path = ClassLoader.getSystemClassLoader.getResource("small.sam").getFile
val path = resourcePath("small.sam")
val ctx = new JavaADAMContext(sc)
val reads: JavaAlignmentRecordRDD = ctx.adamRecordLoad(path)
assert(reads.jrdd.count() === 20)
}

sparkTest("can read a small .SAM file inside of java") {
val path = ClassLoader.getSystemClassLoader.getResource("small.sam").getFile
val path = resourcePath("small.sam")
val reads: RDD[AlignmentRecord] = sc.loadAlignments(path)

val newReads: JavaAlignmentRecordRDD = JavaADAMConduit.conduit(reads)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,9 @@ class Transform(protected val args: TransformArgs) extends BDGSparkCommand[Trans
.fold(new ConsensusGeneratorFromReads().asInstanceOf[ConsensusGenerator])(
new ConsensusGeneratorFromKnowns(_, sc).asInstanceOf[ConsensusGenerator])

adamRecords = adamRecords.adamRealignIndels(consensusGenerator,
false,
adamRecords = adamRecords.adamRealignIndels(
consensusGenerator,
isSorted = false,
args.maxIndelSize,
args.maxConsensusNumber,
args.lodThreshold,
Expand Down
1 change: 1 addition & 0 deletions adam-cli/src/test/resources/ordered.sam
1 change: 1 addition & 0 deletions adam-cli/src/test/resources/unordered.sam
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import org.bdgenomics.formats.avro.AlignmentRecord
class Adam2FastqSuite extends ADAMFunSuite {

sparkTest("convert SAM to paired FASTQ") {
val readsFilepath = ClassLoader.getSystemClassLoader.getResource("bqsr1.sam").getFile
val readsFilepath = resourcePath("bqsr1.sam")

// The following fastq files were generated by Picard's SamToFastq

Expand All @@ -45,8 +45,8 @@ class Adam2FastqSuite extends ADAMFunSuite {

// VALIDATION_STRINGENCY=SILENT is necessary since they are unpaired reads and this matches the ADAM default

val fastq1Path = ClassLoader.getSystemClassLoader.getResource("bqsr1-r1.fq").getFile
val fastq2Path = ClassLoader.getSystemClassLoader.getResource("bqsr1-r2.fq").getFile
val fastq1Path = resourcePath("bqsr1-r1.fq")
val fastq2Path = resourcePath("bqsr1-r2.fq")

val outputDir = Files.createTempDir()
val outputFastqR1File = outputDir.getAbsolutePath + "/bqsr1-r1.fq"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class FlagStatSuite extends ADAMFunSuite {

sparkTest("Standard FlagStat test") {

val inputpath = ClassLoader.getSystemClassLoader.getResource("features/NA12878.sam").getFile
val inputpath = resourcePath("features/NA12878.sam")
val argLine = "%s".format(inputpath).split("\\s+")

val args: FlagStatArgs = Args4j.apply[FlagStatArgs](argLine)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli

import java.nio.file.Files

import org.bdgenomics.adam.util.ADAMFunSuite

/**
 * Round-trip tests for the Transform CLI command: SAM in, SAM out (optionally
 * via an intermediate ADAM store, optionally sorted), with the result compared
 * against an expected fixture file.
 */
class TransformSuite extends ADAMFunSuite {

  sparkTest("unordered sam to unordered sam") {
    val input = resourcePath("unordered.sam")
    val output = tmpFile("unordered.sam")
    // No sort flag: the round-tripped output should match the input exactly.
    Transform(Array("-single", input, output)).run(sc)
    checkFiles(input, output)
  }

  sparkTest("unordered sam to ordered sam") {
    val input = resourcePath("unordered.sam")
    val output = tmpFile("ordered.sam")
    val expected = resourcePath("ordered.sam")
    // -sort_reads should produce the pre-sorted fixture.
    Transform(Array("-single", "-sort_reads", input, output)).run(sc)
    checkFiles(expected, output)
  }

  sparkTest("unordered sam, to adam, to sam") {
    val input = resourcePath("unordered.sam")
    val adamTemp = tmpFile("unordered.adam")
    val output = tmpFile("unordered.sam")
    // SAM -> ADAM -> SAM without sorting should be lossless.
    Transform(Array(input, adamTemp)).run(sc)
    Transform(Array("-single", adamTemp, output)).run(sc)
    checkFiles(input, output)
  }

  sparkTest("unordered sam, to adam, to ordered sam") {
    val input = resourcePath("unordered.sam")
    val adamTemp = tmpFile("unordered.adam")
    val output = tmpFile("ordered.sam")
    val expected = resourcePath("ordered.sam")
    // SAM -> ADAM, then sorted SAM out, compared to the pre-sorted fixture.
    Transform(Array(input, adamTemp)).run(sc)
    Transform(Array("-single", "-sort_reads", adamTemp, output)).run(sc)
    checkFiles(expected, output)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -85,8 +85,8 @@ object ReferencePosition extends Serializable {

class ReferencePosition(override val referenceName: String,
val pos: Long,
override val orientation: Strand = Strand.Independent) extends ReferenceRegion(referenceName, pos, pos + 1, orientation) {
}
override val orientation: Strand = Strand.Independent)
extends ReferenceRegion(referenceName, pos, pos + 1, orientation)

class ReferencePositionSerializer extends Serializer[ReferencePosition] {
private val enumValues = Strand.values()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,12 @@ object ReferenceRegion {
* which is <i>not</i> in the region -- i.e. [start, end) define a 0-based
* half-open interval.
*/
case class ReferenceRegion(referenceName: String, start: Long, end: Long, orientation: Strand = Strand.Independent) extends Comparable[ReferenceRegion] with Interval {
case class ReferenceRegion(referenceName: String,
start: Long,
end: Long,
orientation: Strand = Strand.Independent)
extends Comparable[ReferenceRegion]
with Interval {

assert(start >= 0 && end >= start, "Failed when trying to create region %s %d %d on %s strand.".format(referenceName, start, end, orientation))

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ class SequenceDictionary(val records: Vector[SequenceRecord]) extends Serializab
private val byName: Map[String, SequenceRecord] = records.view.map(r => r.name -> r).toMap
assert(byName.size == records.length, "SequenceRecords with duplicate names aren't permitted")

private val hasSequenceOrdering = records.forall(_.referenceIndex.isDefined)

def isCompatibleWith(that: SequenceDictionary): Boolean = {
for (record <- that.records) {
val myRecord = byName.get(record.name)
Expand Down Expand Up @@ -109,7 +111,11 @@ class SequenceDictionary(val records: Vector[SequenceRecord]) extends Serializab
* @return Returns a SAM formatted sequence dictionary.
*/
def toSAMSequenceDictionary: SAMSequenceDictionary = {
import SequenceRecord._
implicit val ordering: Ordering[SequenceRecord] =
if (hasSequenceOrdering)
SequenceOrderingByRefIdx
else
SequenceOrderingByName
new SAMSequenceDictionary(records.sorted.map(_ toSAMSequenceRecord).toList)
}

Expand All @@ -118,26 +124,41 @@ class SequenceDictionary(val records: Vector[SequenceRecord]) extends Serializab
}
}

object SequenceOrdering extends Ordering[SequenceRecord] {
/** Orders [[SequenceRecord]]s lexicographically by contig name. */
object SequenceOrderingByName extends Ordering[SequenceRecord] {
  def compare(a: SequenceRecord, b: SequenceRecord): Int =
    // Ordering.String delegates to String.compareTo, so this is the same
    // lexicographic comparison as a.name.compareTo(b.name).
    Ordering.String.compare(a.name, b.name)
}

/**
 * Orders [[SequenceRecord]]s by their referenceIndex.
 *
 * Both records being compared must have a defined referenceIndex; if either
 * is missing, an exception is thrown.
 */
object SequenceOrderingByRefIdx extends Ordering[SequenceRecord] {
  def compare(a: SequenceRecord, b: SequenceRecord): Int =
    (a.referenceIndex, b.referenceIndex) match {
      case (Some(aIdx), Some(bIdx)) =>
        aIdx.compareTo(bIdx)
      case _ =>
        throw new Exception(s"Missing reference index when comparing SequenceRecords: $a, $b")
    }
}

/**
* Utility class within the SequenceDictionary; represents unique reference name-to-id correspondence
*
*/
class SequenceRecord(
val name: String,
val length: Long,
val url: Option[String] = None,
val md5: Option[String] = None,
val refseq: Option[String] = None,
val genbank: Option[String] = None,
val assembly: Option[String] = None,
val species: Option[String] = None) extends Serializable {
case class SequenceRecord(
name: String,
length: Long,
url: Option[String],
md5: Option[String],
refseq: Option[String],
genbank: Option[String],
assembly: Option[String],
species: Option[String],
referenceIndex: Option[Int]) extends Serializable {

assert(name != null && !name.isEmpty, "SequenceRecord.name is null or empty")
assert(length > 0, "SequenceRecord.length <= 0")
Expand Down Expand Up @@ -170,6 +191,8 @@ class SequenceRecord(
// set genbank accession number if available
genbank.foreach(rec.setAttribute("GENBANK", _))

referenceIndex.foreach(rec.setSequenceIndex)

// return record
rec
}
Expand All @@ -191,16 +214,15 @@ object SequenceRecord {
val REFSEQ_TAG = "REFSEQ"
val GENBANK_TAG = "GENBANK"

implicit def ordering = SequenceOrdering

def apply(name: String,
length: Long,
md5: String = null,
url: String = null,
refseq: String = null,
genbank: String = null,
assembly: String = null,
species: String = null): SequenceRecord = {
species: String = null,
referenceIndex: Option[Int] = None): SequenceRecord = {
new SequenceRecord(
name,
length,
Expand All @@ -209,7 +231,9 @@ object SequenceRecord {
Option(refseq).map(_.toString),
Option(genbank).map(_.toString),
Option(assembly).map(_.toString),
Option(species).map(_.toString))
Option(species).map(_.toString),
referenceIndex
)
}

/*
Expand All @@ -227,7 +251,9 @@ object SequenceRecord {
refseq = record.getAttribute(REFSEQ_TAG),
genbank = record.getAttribute(GENBANK_TAG),
assembly = record.getAssembly,
species = record.getAttribute(SAMSequenceRecord.SPECIES_TAG))
species = record.getAttribute(SAMSequenceRecord.SPECIES_TAG),
referenceIndex = if (record.getSequenceIndex == -1) None else Some(record.getSequenceIndex)
)

}
def toSAMSequenceRecord(record: SequenceRecord): SAMSequenceRecord = {
Expand All @@ -244,7 +270,9 @@ object SequenceRecord {
md5 = contig.getContigMD5,
url = contig.getReferenceURL,
assembly = contig.getAssembly,
species = contig.getSpecies)
species = contig.getSpecies,
referenceIndex = Option(contig.getReferenceIndex).map(Integer2int)
)
}

def toADAMContig(record: SequenceRecord): Contig = {
Expand All @@ -255,6 +283,7 @@ object SequenceRecord {
record.url.foreach(builder.setReferenceURL)
record.assembly.foreach(builder.setAssembly)
record.species.foreach(builder.setSpecies)
record.referenceIndex.foreach(builder.setReferenceIndex(_))
builder.build
}

Expand Down Expand Up @@ -289,7 +318,8 @@ object SequenceRecord {
SequenceRecord(
rec.get(schema.getField("referenceName").pos()).toString,
rec.get(schema.getField("referenceLength").pos()).asInstanceOf[Long],
url = rec.get(schema.getField("referenceUrl").pos()).toString)
url = rec.get(schema.getField("referenceUrl").pos()).toString
)
} else if (schema.getField("contig") != null) {
val pos = schema.getField("contig").pos()
fromADAMContig(rec.get(pos).asInstanceOf[Contig])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -232,8 +232,7 @@ class ADAMContext(val sc: SparkContext) extends Serializable with Logging {
}
}

def loadBam(
filePath: String): RDD[AlignmentRecord] = {
def loadBam(filePath: String): RDD[AlignmentRecord] = {

val path = new Path(filePath)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ abstract class ADAMSequenceDictionaryRDDAggregator[T](rdd: RDD[T]) extends Seria
*
* @return A sequence dictionary describing the reference contigs in this dataset.
*/
def adamGetSequenceDictionary(): SequenceDictionary = {
def adamGetSequenceDictionary(performLexSort: Boolean = false): SequenceDictionary = {
def mergeRecords(l: List[SequenceRecord], rec: T): List[SequenceRecord] = {
val recs = getSequenceRecordsFromElement(rec)

Expand All @@ -118,8 +118,16 @@ abstract class ADAMSequenceDictionaryRDDAggregator[T](rdd: RDD[T]) extends Seria
SequenceDictionary(recs: _*)
}

rdd.mapPartitions(iter => Iterator(foldIterator(iter)), preservesPartitioning = true)
.reduce(_ ++ _)
val sd =
rdd
.mapPartitions(iter => Iterator(foldIterator(iter)), preservesPartitioning = true)
.reduce(_ ++ _)

if (performLexSort) {
implicit val ordering = SequenceOrderingByName
SequenceDictionary(sd.records.map(_.copy(referenceIndex = None)).sorted: _*)
} else
sd
}

}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ class NucleotideContigFragmentRDDFunctions(rdd: RDD[NucleotideContigFragment]) e
*/
def flankAdjacentFragments(flankLength: Int,
optSd: Option[SequenceDictionary] = None): RDD[NucleotideContigFragment] = {
FlankReferenceFragments(rdd, optSd.getOrElse(adamGetSequenceDictionary), flankLength)
FlankReferenceFragments(rdd, optSd.getOrElse(adamGetSequenceDictionary(performLexSort = false)), flankLength)
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,11 +68,11 @@ class AlignmentRecordRDDFunctions(rdd: RDD[AlignmentRecord])
isSorted: Boolean = false): Boolean = {
if (args.outputPath.endsWith(".sam")) {
log.info("Saving data in SAM format")
rdd.adamSAMSave(args.outputPath, asSingleFile = args.asSingleFile)
rdd.adamSAMSave(args.outputPath, asSingleFile = args.asSingleFile, isSorted = isSorted)
true
} else if (args.outputPath.endsWith(".bam")) {
log.info("Saving data in BAM format")
rdd.adamSAMSave(args.outputPath, asSam = false, asSingleFile = args.asSingleFile)
rdd.adamSAMSave(args.outputPath, asSam = false, asSingleFile = args.asSingleFile, isSorted = isSorted)
true
} else
false
Expand Down Expand Up @@ -234,7 +234,7 @@ class AlignmentRecordRDDFunctions(rdd: RDD[AlignmentRecord])
*/
def adamConvertToSAM(isSorted: Boolean = false): (RDD[SAMRecordWritable], SAMFileHeader) = ConvertToSAM.time {
// collect global summary data
val sd = rdd.adamGetSequenceDictionary()
val sd = rdd.adamGetSequenceDictionary(isSorted)
val rgd = rdd.adamGetReadGroupDictionary()

// create conversion object
Expand Down
14 changes: 14 additions & 0 deletions adam-core/src/test/resources/ordered.sam
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
@HD VN:1.4 SO:unsorted
@SQ SN:14 LN:107349540 M5:14
@SQ SN:GL000211.1 LN:166566 M5:GL000211.1
@SQ SN:GL000244.1 LN:39929 M5:GL000244.1
@SQ SN:chr22 LN:51304566 M5:CHR22
@RG ID:SRR062634 SM:HG00096 CN:WUGSC DS:SRP001294 LB:2845856850 PI:206 PL:ILLUMINA
SRR062634.6859057 163 14 16050606 37 100M = 16050680 174 GGTTTACTGTGTCAGAACAGAGTGTGCCGATTGTGGTCAGGACTCCATAGCATTTCACCATTGAGTTATTTCCGCCCCCTTACGTGTCTCTCTTCAGCGG GFFGGGGGGGGGGFGGGGGGGGGGGGGGGFGGGBGGCGGGDDEFFGEEFFGFEGEEEEEEEGFEGEEEEDEFEEEEEEE=DBCCBCCCCCACCAC,A@?? X0:i:1 X1:i:0 LB:Z:2845856850 MD:Z:0A1G0C1T1A0A0G1C0A0C0T0C0G0A1A0T0C1C1A1T0T0G0C0A1A0T0T0C0T1C0A1A0A0A0G1G0T0A0T0T1C0A0A1A1T0G0G0T0C0C2C0A0A0A0A0G0A0A0A0G0G0T1C1A0T1C1G0G0G0A0G0A1A1A0T1C0 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:37 XM:i:0 XO:i:0 XT:A:U
SRR062634.6859057 83 14 16050680 36 100M = 16050606 -174 CCCCCTTACGTGTCTCTCTTCAGCGGTCTATTATCTCCAAGAGGGCATAAAACACTGAGTAAACAGCTCTTTTATATGTGTTTCCTGGATGAGCCTTCTT 9<5=;;A7B<?A?A?AABB:C:CC=CACE?DDCCD@EEFEEFEFEFGFGGGEGEGGGGGGGEGFGEGGGGFGGGGGGGGGEGGGGGGGGGGGGGFGFGGG X0:i:2 X1:i:0 XA:Z:14,+19792222,100M,0; LB:Z:2845856850 MD:Z:0A0A0G0G0T1C1A0T1C1G0G0G0A0G0A1A1A0T1C0A0T0A0C0A1C0A1A0A0A2A0G0T0T0T1T0C1G2T0T3T0C2G0T0T0T0T0T0A0T0G1G0A1G2A1T1C0C0T0T2C0C0A0C0C0A0T0A0G0G1C1 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
SRR062634.20563591 99 GL000211.1 100 0 100M = 500 161 GGACAACATTCACCTTTAAAAGTTTATTGATCTTTTGTGACATGCACGTGGGTTCCCAGTAGCAAGAAACTAAAGGGTCGCAGGCCGGTTTCTGCTAATT GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGFGGGGFGGGGGGGFGGGGGEGGGGGGEFGFGGFGGGGFBGGGDE>EEECBCCEEC>E:A@=ADBCAB X0:i:2 X1:i:0 XA:Z:14,-19792774,100M,0; LB:Z:2845856850 MD:Z:0T0C0T0A0C0G0A1A0A0A1A0T3T0C0C1A0A0C1G0C1C1A0T0C0A0A0A0A0G0A1A0G0G0T0T0C0A0A0C0T0C0T0G1G0A0G1T0G1A0T0G0C0A0C1C0A1T0T0C0A0A0A0G0A0A0G0T0T0T1T0C0A0G0A0A0T1C0T1C0T0G0C0 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
SRR062634.20563591 147 GL000211.1 500 0 100M = 100 -161 GCAAGAAACTAAAGGGTCGCAGGCCGGTTTCTGCTAATTTCTTTAATTCCAAGACAGTCTCAAATATTTTCTTATTAACTTCCTGGAGGGAGGCTTATCA =DDFECDCFEDFEFFFEEFD:EF?FBFFFFBACEF?EGGGEDGGBGEGFGGGGGGFGGGGGGGGGFGGGGFFGGGGGGGFGGGGGGGGGGGGGGGGGGGG X0:i:2 X1:i:0 XA:Z:14,+19792713,100M,0; LB:Z:2845856850 MD:Z:0A0T0G0C0A0C1C0A1T0T0C0A0A0A0G0A0A0G0T0T0T1T0C0A0G0A0A0T1C0T1C0T0G0C0A0T0A0G0C0T0T3T0G0T2A0G0A1A1T0T0C2T2C0C0A0C0C0C1A0G0G1C2A0A0A1C0A0C0T0C1A0A0A1G2C0 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
SRR062634.5613583 99 GL000244.1 200 0 100M = 600 158 TGCAGGCCGGTTTCTGCTAATTTCTTTAATTCCAAGACAGTCTCAATTATTTTCTTATTAACTTCCTGGAGGGAGGCTTATCATTCTCTCTTTTGGATGA GGEGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGDEGEFEGDCEEEBF?DDEEGDGEEEGE=ABB? X0:i:2 X1:i:0 XA:Z:14,-19792696,100M,2; LB:Z:2845856850 MD:Z:0A0A0G0T0T0T1T0C0A0G0A0A0T1C0T1C0T0G0C0A0T0A0G0C0T0T3T0G0T2A0G0A1A1T0T0C0A1T2C0C0A0C0C0C1A0G0G1C2A0A0A1C0A0C0T0C1A0A0A1G2C0A0C0A0G0G0C0A0G0A2C0C1C0A1 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:2 SM:i:0 XM:i:2 XO:i:0 XT:A:R
SRR062634.5613583 147 GL000244.1 600 0 100M = 200 -158 TAACTTCCTGGAGGGAGGCTTATCATTCTCTCTTTTGGATGATTCTAAGTACCAGCTAAAATACAGCTATCATTCATTTTCCTTGATTTGGGAGCCTAAT @E=EAEEEEGF?EEGEGFFEDBGEGFGGGGFGFFGFGGGGGGGGGGGGGGEGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG X0:i:2 X1:i:0 XA:Z:14,+19792638,100M,0; LB:Z:2845856850 MD:Z:0A0G0G1C2A0A0A1C0A0C0T0C1A0A0A1G2C0A0C0A0G0G0C0A0G0A2C0C1C0A1A0A0A0G1G0T0A0T0T0T0C0A0A0A1C0T0G0G0T1C0A0T0C1A0A0G0G0A0A1G0A3A0G0C0T0C1G1T1A1T0G0A0A0T0G0C0 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
SRR062634.11091461 99 chr22 16050471 0 100M = 16050505 134 CAAGTAGTAGTGCCATAATTACCAAACATAAAGCCAACTGAGATGCCCAAAGGGGGCCACTCTCCTTGCTTTTCCTCCTTTTTAGAGGATTTATTTCCCA GGGGGFFFFFFGGGGGGGGGGGGGGGGGGGGGGGGGGGGFGGGFGGEGGEFEEBEEECFEEEBEFBACDBEEEEEACC?AAAA7=5::59BBBCB6/=CA X0:i:2 X1:i:0 XA:Z:14,-19792431,100M,0; LB:Z:2845856850 MD:Z:1T1C0A1A0A2A1T0T0T1T1C0A1A1T0G0C0T0C0A1T0C0A0A0A0G0G0A0A0A0G1T1C0A0A1T0C0T1T0A0A1A0T0G0A0A1C1A0C0A0T0A1C0A0C0A0A0A0G0A0A0G3C0T0C0A0A1A2C2C0T0G0T0C0 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
SRR062634.11091461 147 chr22 16050505 0 100M = 16050471 -134 CAACTGAGATGCCCAAAGGGGGCCACTCTCCTTGCTTTTCCTCCTTTTTAGAGGATTTATTTCCCATTTTTCTTAAAAAGGAAGAACAAACTGTGCCCTA =@@?CCCD?@DCDCC@CEFEEGEEEFFFFDEFEGAGGFGGGGGGGGGGGGGFGGGGGFGGDGEGFGGGGGGGGGGGGGGGGGGGGGGGGGGGGGFGGGGG X0:i:2 X1:i:0 XA:Z:14,+19792397,100M,0; LB:Z:2845856850 MD:Z:0A0G0G0A0A0A0G1T1C0A0A1T0C0T1T0A0A1A0T0G0A0A1C1A0C0A0T0A1C0A0C0A0A0A0G0A0A0G3C0T0C0A0A1A2C2C0T0G0T0C0C0A0G2G3C0G0T0G0A0A0G1T0G0T0T0T0T0C0T1T1C1A0A0C1 RG:Z:SRR062634 XG:i:0 AM:i:0 NM:i:0 SM:i:0 XM:i:0 XO:i:0 XT:A:R
Loading

0 comments on commit 283ea9d

Please sign in to comment.