Index merging #1263
BAMIndexMerger.java (new file)
@@ -0,0 +1,289 @@
/*
 * The MIT License
 *
 * Copyright (c) 2018 The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package htsjdk.samtools;

import htsjdk.samtools.seekablestream.SeekableStream;
import htsjdk.samtools.util.BlockCompressedFilePointerUtil;

import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Merges BAM index files for (headerless) parts of a BAM file into a single
 * index file. The index files must have been produced using an uninitialized window (TODO).
 */
public class BAMIndexMerger extends IndexMerger<AbstractBAMFileIndex> {

    private static final int UNINITIALIZED_WINDOW = -1;

    private int numReferences = -1;
    private List<AbstractBAMFileIndex> indexes = new ArrayList<>();

    private long noCoordinateCount;

    public BAMIndexMerger(final OutputStream out, final long headerLength) {
        super(out, headerLength);
    }

    @Override
    public void processIndex(final AbstractBAMFileIndex index, final long partLength) {
        this.partLengths.add(partLength);
        if (numReferences == -1) {
            numReferences = index.getNumberOfReferences();
        }
        if (index.getNumberOfReferences() != numReferences) {
            throw new IllegalArgumentException(
                    String.format("Cannot merge BAI files with different number of references, %s and %s.", numReferences, index.getNumberOfReferences()));
        }
        // just store the indexes rather than computing the BAMIndexContent for each ref,
        // since there may be thousands of refs and indexes, each with thousands of bins
        indexes.add(index);
        noCoordinateCount += index.getNoCoordinateCount();
    }

    @Override
    public void finish(final long dataFileLength) {
        if (indexes.isEmpty()) {
            throw new IllegalArgumentException("Cannot merge zero BAI files");
        }
        final long[] offsets = partLengths.stream().mapToLong(i -> i).toArray();
        Arrays.parallelPrefix(offsets, (a, b) -> a + b); // cumulative offsets
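        // Illustration (assuming the IndexMerger superclass seeds partLengths with
        // the header length): a 1024-byte header plus parts of 500 and 700 bytes
        // gives partLengths = [1024, 500, 700] and offsets = [1024, 1524, 2224],
        // so pointers from part i are shifted forward by offsets[i] bytes.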

        try (BinaryBAMIndexWriter writer =
[Review] extraneous new-line
                new BinaryBAMIndexWriter(numReferences, out)) {
            for (int ref = 0; ref < numReferences; ref++) {
                final int r = ref;
                List<BAMIndexContent> bamIndexContentList = indexes.stream().map(index -> index.getQueryResults(r)).collect(Collectors.toList());
                final BAMIndexContent bamIndexContent = mergeBAMIndexContent(ref, bamIndexContentList, offsets);
                writer.writeReference(bamIndexContent);
            }
            writer.writeNoCoordinateRecordCount(noCoordinateCount);
        }
    }

    public static AbstractBAMFileIndex openIndex(SeekableStream stream, SAMSequenceDictionary dictionary) {
[Review] this is unused, should it be deleted?
[Reply] Changed to use from the test, so
        return new CachingBAMFileIndexOptimized(stream, dictionary);
    }

    private static BAMIndexContent mergeBAMIndexContent(final int referenceSequence,
            final List<BAMIndexContent> bamIndexContentList, final long[] offsets) {
        final List<BinningIndexContent.BinList> binLists = new ArrayList<>();
        final List<BAMIndexMetaData> metaDataList = new ArrayList<>();
        final List<LinearIndex> linearIndexes = new ArrayList<>();
        for (BAMIndexContent bamIndexContent : bamIndexContentList) {
            if (bamIndexContent == null) {
                binLists.add(null);
                metaDataList.add(null);
                linearIndexes.add(null);
            } else {
                binLists.add(bamIndexContent.getBins());
                metaDataList.add(bamIndexContent.getMetaData());
                linearIndexes.add(bamIndexContent.getLinearIndex());
            }
        }
        return new BAMIndexContent(
                referenceSequence,
                mergeBins(binLists, offsets),
                mergeMetaData(metaDataList, offsets),
                mergeLinearIndexes(referenceSequence, linearIndexes, offsets));
    }

    /**
     * Merge bins for (headerless) BAM file parts.
     * @param binLists the bins to merge
     * @param offsets bin <i>i</i> will be shifted by offset <i>i</i>
     * @return the merged bins
     */
    public static BinningIndexContent.BinList mergeBins(final List<BinningIndexContent.BinList> binLists, final long[] offsets) {
        final List<Bin> mergedBins = new ArrayList<>();
        final int maxBinNumber = binLists.stream().filter(Objects::nonNull).mapToInt(bl -> bl.maxBinNumber).max().orElse(0);
        int commonNonNullBins = 0;
        for (int i = 0; i <= maxBinNumber; i++) {
            final List<Bin> nonNullBins = new ArrayList<>();
            for (int j = 0; j < binLists.size(); j++) {
                final BinningIndexContent.BinList binList = binLists.get(j);
                if (binList == null) {
                    continue;
                }
                final Bin bin = binList.getBin(i);
                if (bin != null) {
                    nonNullBins.add(bin.shift(offsets[j]));
                }
            }
            if (!nonNullBins.isEmpty()) {
                mergedBins.add(mergeBins(nonNullBins));
                commonNonNullBins += nonNullBins.size() - 1;
            }
        }
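        // Each merged bin stands in for nonNullBins.size() source bins, so the
        // duplicates counted in commonNonNullBins are subtracted below to keep
        // the total non-null bin count accurate.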
        final int numberOfNonNullBins =
                binLists.stream().filter(Objects::nonNull).mapToInt(BinningIndexContent.BinList::getNumberOfNonNullBins).sum() - commonNonNullBins;
        return new BinningIndexContent.BinList(mergedBins.toArray(new Bin[0]), numberOfNonNullBins);
    }

    private static Bin mergeBins(final List<Bin> bins) {
        if (bins.isEmpty()) {
            throw new IllegalArgumentException("Cannot merge empty bins");
        }
        if (bins.size() == 1) {
            return bins.get(0);
        }
        final int referenceSequence = bins.get(0).getReferenceSequence();
        final int binNumber = bins.get(0).getBinNumber();
        final List<Chunk> allChunks = new ArrayList<>();
        for (Bin b : bins) {
            if (b.getReferenceSequence() != referenceSequence) {
                throw new IllegalArgumentException("Bins have different reference sequences");
[Review] It would be good if this error included the two numbers.
[Reply] Done
            }
            if (b.getBinNumber() != binNumber) {
                throw new IllegalArgumentException("Bins have different numbers");
[Review] Likewise
[Reply] Done
            }
            allChunks.addAll(b.getChunkList());
        }
        Collections.sort(allChunks);
        final Bin bin = new Bin(referenceSequence, binNumber);
        for (Chunk newChunk : allChunks) {
            // logic is from BinningIndexBuilder#processFeature
            final long chunkStart = newChunk.getChunkStart();
[Review] I think this block could probably be extracted as a method Bin.addChunk() and replace duplicate code here, CRAMBAIIndexer, and BinningIndexBuilder
[Reply] Done
            final long chunkEnd = newChunk.getChunkEnd();

            final List<Chunk> oldChunks = bin.getChunkList();
            if (!bin.containsChunks()) {
                bin.addInitialChunk(newChunk);
            } else {
                final Chunk lastChunk = bin.getLastChunk();

                // Coalesce chunks that are in the same or adjacent file blocks.
                // Similar to AbstractBAMFileIndex.optimizeChunkList,
                // but no need to copy the list, no minimumOffset, and maintain bin.lastChunk
                if (BlockCompressedFilePointerUtil.areInSameOrAdjacentBlocks(
                        lastChunk.getChunkEnd(), chunkStart)) {
                    lastChunk.setChunkEnd(chunkEnd); // coalesced
                } else {
                    oldChunks.add(newChunk);
                    bin.setLastChunk(newChunk);
                }
            }
        }
        return bin;
    }

    private static BAMIndexMetaData mergeMetaData(final List<BAMIndexMetaData> metaDataList, final long[] offsets) {
        final List<BAMIndexMetaData> newMetadataList = new ArrayList<>();
        for (int i = 0; i < metaDataList.size(); i++) {
            if (metaDataList.get(i) == null) {
                continue;
            }
            newMetadataList.add(metaDataList.get(i).shift(offsets[i]));
        }
        return mergeMetaData(newMetadataList);
    }

    private static BAMIndexMetaData mergeMetaData(final List<BAMIndexMetaData> metaDataList) {
        long firstOffset = Long.MAX_VALUE;
        long lastOffset = Long.MIN_VALUE;
        long alignedRecordCount = 0;
        long unalignedRecordCount = 0;

        for (BAMIndexMetaData metaData : metaDataList) {
            if (metaData.getFirstOffset() != -1) { // -1 is unset, see BAMIndexMetaData
                firstOffset = Math.min(firstOffset, metaData.getFirstOffset());
            }
            if (metaData.getLastOffset() != 0) { // 0 is unset, see BAMIndexMetaData
                lastOffset = Math.max(lastOffset, metaData.getLastOffset());
            }
            alignedRecordCount += metaData.getAlignedRecordCount();
            unalignedRecordCount += metaData.getUnalignedRecordCount();
        }

        if (firstOffset == Long.MAX_VALUE) {
            firstOffset = -1;
        }
        if (lastOffset == Long.MIN_VALUE) {
            lastOffset = -1;
        }

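        // The BAI format stores per-reference metadata as a pseudo-bin whose two
        // chunks hold (firstOffset, lastOffset) and (alignedRecordCount,
        // unalignedRecordCount), which is why the merged values are packed into
        // two Chunk objects here.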
        final List<Chunk> chunkList = new ArrayList<>();
        chunkList.add(new Chunk(firstOffset, lastOffset));
        chunkList.add(new Chunk(alignedRecordCount, unalignedRecordCount));
        return new BAMIndexMetaData(chunkList);
    }

    /**
     * Merge linear indexes for (headerless) BAM file parts.
     * @param referenceSequence the reference sequence number for the linear indexes being merged
     * @param linearIndexes the linear indexes to merge
     * @param offsets linear index <i>i</i> will be shifted by offset <i>i</i>
     * @return the merged linear index
     */
    public static LinearIndex mergeLinearIndexes(final int referenceSequence, final List<LinearIndex> linearIndexes, final long[] offsets) {
        int maxIndex = -1;
        for (LinearIndex li : linearIndexes) {
            if (li == null) {
                continue;
            }
            if (li.getIndexStart() != 0) {
                throw new IllegalArgumentException("Cannot merge linear indexes that don't all start at zero");
            }
            maxIndex = Math.max(maxIndex, li.size());
        }
        if (maxIndex == -1) {
            return new LinearIndex(referenceSequence, 0, new long[0]);
        }

        final long[] entries = new long[maxIndex];
        Arrays.fill(entries, UNINITIALIZED_WINDOW);
        for (int i = 0; i < maxIndex; i++) {
            for (int liIndex = 0; liIndex < linearIndexes.size(); liIndex++) {
                final LinearIndex li = linearIndexes.get(liIndex);
                if (li == null) {
                    continue;
                }
                final long[] indexEntries = li.getIndexEntries();
                // Use the first linear index that has an index entry at position i.
                // There is no need to check later linear indexes, since their entries
                // will be guaranteed to have larger offsets (as a consequence of files
                // being coordinate-sorted).
                if (i < indexEntries.length && indexEntries[i] != UNINITIALIZED_WINDOW) {
                    entries[i] = BlockCompressedFilePointerUtil.shift(indexEntries[i], offsets[liIndex]);
                    break;
                }
            }
        }
        // Convert all uninitialized values following the procedure in
        // BinningIndexBuilder#generateIndexContent.
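        // e.g. entries of [-1, 8, -1, -1, 12] become [0, 8, 8, 8, 12]: each
        // uninitialized window inherits the last offset seen before it.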
        long lastNonZeroOffset = 0;
        for (int i = 0; i < maxIndex; i++) {
            if (entries[i] == UNINITIALIZED_WINDOW) {
                entries[i] = lastNonZeroOffset;
            } else {
                lastNonZeroOffset = entries[i];
            }
        }
        return new LinearIndex(referenceSequence, 0, entries);
    }
}
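
As a rough sketch of how the merger is meant to be driven (a hypothetical driver, not part of this PR: the class name, the part-file lists, and the length parameters below are illustrative assumptions; only the BAMIndexMerger constructor, processIndex, finish, and openIndex calls come from the code above):

import htsjdk.samtools.BAMIndexMerger;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.seekablestream.SeekableFileStream;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;

public class MergeBaiPartsExample {
    // Merges one .bai per (headerless) BAM part into a single index for the
    // concatenated BAM. headerLength is the byte length of the header block
    // that precedes the first part; totalLength is the concatenated BAM's length.
    public static void merge(File mergedBai, long headerLength, List<File> partBams,
                             List<File> partBais, SAMSequenceDictionary dict,
                             long totalLength) throws IOException {
        try (OutputStream out = new FileOutputStream(mergedBai)) {
            final BAMIndexMerger merger = new BAMIndexMerger(out, headerLength);
            for (int i = 0; i < partBais.size(); i++) {
                try (SeekableFileStream in = new SeekableFileStream(partBais.get(i))) {
                    // Each part index is shifted by the cumulative length of the
                    // header and all preceding parts, computed inside finish().
                    merger.processIndex(BAMIndexMerger.openIndex(in, dict), partBams.get(i).length());
                }
            }
            merger.finish(totalLength);
        }
    }
}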

BAMIndexMetaData.java
@@ -88,6 +88,13 @@ public class BAMIndexMetaData {
        }
    }

    BAMIndexMetaData(final long firstOffset, final long lastOffset, final int alignedRecords, final int unAlignedRecords) {
[Review] should this be private?
[Reply] Done
        this.firstOffset = firstOffset;
        this.lastOffset = lastOffset;
        this.alignedRecords = alignedRecords;
        this.unAlignedRecords = unAlignedRecords;
    }

    /**
     * @return the count of aligned records associated with this reference
     */

@@ -207,6 +214,19 @@ long getLastOffset() {
        return lastOffset;
    }

    /**
     * Return a new metadata object shifted by a given (non-virtual) offset.
     *
     * @param offset the offset in bytes
     * @return a new metadata object shifted by the given offset
     * @see BlockCompressedFilePointerUtil#shift(long, long)
     */
    BAMIndexMetaData shift(final long offset) {
[Review] Private unless this is used in testing somewhere. It wouldn't be bad to unit test this operation.
[Reply] It's used by
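        // Background (my reading of the BGZF format, not stated in this diff):
        // a virtual file pointer packs the compressed block's byte position in
        // its upper 48 bits and the within-block offset in the low 16 bits, so
        // shifting by a plain byte offset adjusts only the block-position part.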
        final long newFirstOffset = firstOffset == -1 ? firstOffset : BlockCompressedFilePointerUtil.shift(firstOffset, offset); // -1 is unset
        final long newLastOffset = lastOffset == 0 ? lastOffset : BlockCompressedFilePointerUtil.shift(lastOffset, offset); // 0 is unset
        return new BAMIndexMetaData(newFirstOffset, newLastOffset, alignedRecords, unAlignedRecords);
    }

    /**
     * Prints meta-data statistics from BAM index (.bai or .csi) file
     * Statistics include count of aligned and unaligned reads for each reference sequence
[Review] There's a todo in this javadoc. Could you fill out a more complete explanation of what it means now?
[Reply] Done