From 45c3b8a36fe859b3f5ceefbb560ff49e44f90f59 Mon Sep 17 00:00:00 2001 From: Jonathan Baber Date: Thu, 1 Aug 2019 08:34:53 +1000 Subject: [PATCH] Fix clonality exception --- .../variant/clonality/PeakModelFactory.java | 8 +++++--- .../variant/clonality/PealModelFactoryTest.java | 9 +++++++++ .../clonality/WeightedPloidyHistogramTest.java | 2 +- pom.xml | 2 +- purity-ploidy-estimator/README.md | 3 +++ .../src/main/resources/r/copyNumberPlots.R | 16 ++++++++++------ 6 files changed, 29 insertions(+), 11 deletions(-) diff --git a/hmf-common/src/main/java/com/hartwig/hmftools/common/variant/clonality/PeakModelFactory.java b/hmf-common/src/main/java/com/hartwig/hmftools/common/variant/clonality/PeakModelFactory.java index 526b3b0f21..e915b623de 100644 --- a/hmf-common/src/main/java/com/hartwig/hmftools/common/variant/clonality/PeakModelFactory.java +++ b/hmf-common/src/main/java/com/hartwig/hmftools/common/variant/clonality/PeakModelFactory.java @@ -5,6 +5,7 @@ import java.util.Map; import java.util.stream.Collectors; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.hartwig.hmftools.common.numeric.Doubles; @@ -161,12 +162,13 @@ private List peakPloidies(double peak, @NotNull final List peakPloidies) { + @VisibleForTesting + double[] modelPeakHistogram(double peak, @NotNull final List peakPloidies) { double offset = offset(peak); - int maxBucket = bucket(maxPloidy) + 1; - double[] result = new double[maxBucket]; + int maxBucket = bucket(maxPloidy); + double[] result = new double[maxBucket + 1]; double[] weight = scalingFactor(peak, peakPloidies); int startBucket = bucket(peak - offset); diff --git a/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/PealModelFactoryTest.java b/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/PealModelFactoryTest.java index 51410cd84c..a7348de700 100644 --- 
a/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/PealModelFactoryTest.java +++ b/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/PealModelFactoryTest.java @@ -8,6 +8,8 @@ import java.util.Date; import java.util.List; +import com.google.common.collect.Lists; + import org.junit.Ignore; import org.junit.Test; @@ -20,6 +22,13 @@ public void testOffset() { assertEquals(-0.02, victim.offset(0.08), 0.01); } + @Test + public void testMaxBucket() { + final PeakModelFactory victim = new PeakModelFactory(10, 0.05); + victim.modelPeakHistogram(8.18, Lists.newArrayList(WeightedPloidyHistogramTest.create(8.18, 18, 55))); + } + + @Ignore public void testPeakModelling() throws IOException { long startTime = new Date().getTime(); diff --git a/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/WeightedPloidyHistogramTest.java b/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/WeightedPloidyHistogramTest.java index 89591eec60..7344a5d52e 100644 --- a/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/WeightedPloidyHistogramTest.java +++ b/hmf-common/src/test/java/com/hartwig/hmftools/common/variant/clonality/WeightedPloidyHistogramTest.java @@ -128,7 +128,7 @@ static List readResource(@NotNull final String file) { return result; } - private static WeightedPloidy create(double ploidy, int alleleReadCount, int totalReadCount) { + static WeightedPloidy create(double ploidy, int alleleReadCount, int totalReadCount) { return ModifiableWeightedPloidy.create() .setPloidy(ploidy) .setWeight(1) diff --git a/pom.xml b/pom.xml index 7c4016b653..c56fe60c50 100644 --- a/pom.xml +++ b/pom.xml @@ -57,7 +57,7 @@ 2.5 1.7 - 2.31 + 2.32 2.4.4 2.12.0 diff --git a/purity-ploidy-estimator/README.md b/purity-ploidy-estimator/README.md index e437de8f14..c276a8341a 100644 --- a/purity-ploidy-estimator/README.md +++ b/purity-ploidy-estimator/README.md @@ -129,6 +129,7 @@ java -jar purple.jar \ -amber 
/path/to/COLO829/amber \ -cobalt /path/to/COLO829/cobalt \ -gc_profile /path/to/GC_profile.hg19.1000bp.cnp \ + -ref_genome /path/to/Homo_sapiens_assembly38.fasta \ -somatic_vcf /path/to/COLO829/COLO829.somatic.vcf.gz \ -structural_vcf /path/to/COLO829/COLO829.sv.vcf.gz \ -sv_recovery_vcf /path/to/COLO829/COLO829.sv.all.vcf.gz \ @@ -943,6 +944,8 @@ Threads | Elapsed Time| CPU Time | Peak Mem ## Version History +- [2.32](https://github.com/hartwigmedical/hmftools/releases/tag/purple-v2-32) + - Fixed bug in subclonal modelling when somatic peak is close to max - [2.31](https://github.com/hartwigmedical/hmftools/releases/tag/purple-v2-31) - Added microsatellite status - Added subclonal likelihood model and figure diff --git a/purity-ploidy-estimator/src/main/resources/r/copyNumberPlots.R b/purity-ploidy-estimator/src/main/resources/r/copyNumberPlots.R index c0f32ebe1e..67af5a8ca5 100644 --- a/purity-ploidy-estimator/src/main/resources/r/copyNumberPlots.R +++ b/purity-ploidy-estimator/src/main/resources/r/copyNumberPlots.R @@ -150,13 +150,17 @@ copynumber_pdf <- function(copyNumberRegions) { } copyNumbers = read.table(file = paste0(purpleDir, "/", sample, ".purple.cnv.somatic.tsv"), sep = "\t", header = T, comment.char = "!") %>% - mutate(chromosome = gsub("chr", "", chromosome)) - -copyNumberPDF = copynumber_pdf(copyNumbers) -ggsave(filename = paste0(plotDir, "/", sample, ".copynumber.png"), copyNumberPDF, units = "in", height = 4, width = 4.8, scale = 1) + mutate(chromosome = gsub("chr", "", chromosome)) %>% + filter(!chromosome %in% c('X','Y'), bafCount > 0) + +if (nrow(copyNumbers) > 0) { + copyNumberPDF = copynumber_pdf(copyNumbers) + ggsave(filename = paste0(plotDir, "/", sample, ".copynumber.png"), copyNumberPDF, units = "in", height = 4, width = 4.8, scale = 1) + + minorAllelePloidyPDF = minor_allele_ploidy_pdf(copyNumbers) + ggsave(filename = paste0(plotDir, "/", sample, ".map.png"), minorAllelePloidyPDF, units = "in", height = 4, width = 4.8, scale = 1) +}
-minorAllelePloidyPDF = minor_allele_ploidy_pdf(copyNumbers) -ggsave(filename = paste0(plotDir, "/", sample, ".map.png"), minorAllelePloidyPDF, units = "in", height = 4, width = 4.8, scale = 1) rangeDF = read.table(file = paste0(purpleDir, "/", sample, ".purple.purity.range.tsv"), sep = "\t", header = T, comment.char = "!") %>% select(purity, ploidy, score)