Lower memory usage when parsing heap dumps on M
Parsing an hprof file generated on Android M results in a Snapshot filled with duplicate GC root entries. The extra memory usage leaves little room for LeakCanary to analyze the dump and find the leak trace, and often results in an OutOfMemoryError. This commit removes the duplicate GC root entries to relieve the memory pressure.

Fixes square#223
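
The approach, in brief: key each GC root by its type and id, keep the first occurrence, and rebuild the snapshot's root list from the surviving entries. The standalone sketch below illustrates that idea using the perflib RootObj and Snapshot calls that appear in the diff; the class and method names here are illustrative, not part of the commit (which adds this logic to HeapAnalyzer itself).

import com.squareup.haha.perflib.RootObj;
import com.squareup.haha.perflib.Snapshot;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative helper, not part of the commit: same idea as HeapAnalyzer.deduplicateGcRoots().
final class GcRootDeduper {
  static void dedupe(Snapshot snapshot) {
    Collection<RootObj> roots = snapshot.getGCRoots();
    // Key each root by type + id; the first occurrence wins, duplicates are dropped.
    Map<String, RootObj> unique = new LinkedHashMap<>();
    for (RootObj root : roots) {
      String key = root.getRootType().getName() + "@" + root.getId();
      if (!unique.containsKey(key)) {
        unique.put(key, root);
      }
    }
    // Rebuild the snapshot's root list from the de-duplicated entries.
    roots.clear();
    roots.addAll(unique.values());
  }
}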
jrodbx committed Mar 15, 2016
1 parent 8707bc2 commit 0f21131
Showing 3 changed files with 95 additions and 0 deletions.
1 change: 1 addition & 0 deletions build.gradle
@@ -17,6 +17,7 @@ dependencies {
compile 'com.squareup.haha:haha:2.0.2'
compile project(':leakcanary-watcher')
testCompile 'junit:junit:4.12'
testCompile 'org.assertj:assertj-core:1.7.0'
}

android.libraryVariants.all { variant ->
33 changes: 33 additions & 0 deletions src/main/java/com/squareup/leakcanary/HeapAnalyzer.java
@@ -27,6 +27,9 @@
import com.squareup.haha.perflib.Type;
import com.squareup.haha.perflib.io.HprofBuffer;
import com.squareup.haha.perflib.io.MemoryMappedFileBuffer;
import com.squareup.haha.trove.THashMap;
import com.squareup.haha.trove.TObjectProcedure;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
@@ -76,6 +79,7 @@ public AnalysisResult checkForLeak(File heapDumpFile, String referenceKey) {
HprofBuffer buffer = new MemoryMappedFileBuffer(heapDumpFile);
HprofParser parser = new HprofParser(buffer);
Snapshot snapshot = parser.parse();
deduplicateGcRoots(snapshot);

Instance leakingRef = findLeakingReference(referenceKey, snapshot);

@@ -90,6 +94,35 @@ public AnalysisResult checkForLeak(File heapDumpFile, String referenceKey) {
}
}

/**
* Pruning duplicates reduces memory pressure from hprof bloat added in Marshmallow.
*/
void deduplicateGcRoots(Snapshot snapshot) {
// THashMap has a smaller memory footprint than HashMap.
final THashMap<String, RootObj> uniqueRootMap = new THashMap<>();

final List<RootObj> gcRoots = (ArrayList) snapshot.getGCRoots();
for (RootObj root : gcRoots) {
String key = generateRootKey(root);
if (!uniqueRootMap.containsKey(key)) {
uniqueRootMap.put(key, root);
}
}

// Repopulate snapshot with unique GC roots.
gcRoots.clear();
uniqueRootMap.forEach(new TObjectProcedure<String>() {
@Override
public boolean execute(String key) {
return gcRoots.add(uniqueRootMap.get(key));
}
});
}

private String generateRootKey(RootObj root) {
return String.format("%s@0x%08x", root.getRootType().getName(), root.getId());
}

private Instance findLeakingReference(String key, Snapshot snapshot) {
ClassObj refClass = snapshot.findClass(KeyedWeakReference.class.getName());
List<String> keysFound = new ArrayList<>();
61 changes: 61 additions & 0 deletions src/test/java/com/squareup/leakcanary/HeapAnalyzerTest.java
@@ -0,0 +1,61 @@
package com.squareup.leakcanary;

import com.squareup.haha.perflib.RootObj;
import com.squareup.haha.perflib.Snapshot;

import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static com.squareup.haha.perflib.RootType.NATIVE_STATIC;
import static com.squareup.haha.perflib.RootType.SYSTEM_CLASS;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;

public class HeapAnalyzerTest {
private static final ExcludedRefs NO_EXCLUDED_REFS = null;
private static final List<RootObj> DUP_ROOTS =
asList(new RootObj(SYSTEM_CLASS, 6L),
new RootObj(SYSTEM_CLASS, 5L),
new RootObj(SYSTEM_CLASS, 3L),
new RootObj(SYSTEM_CLASS, 5L),
new RootObj(NATIVE_STATIC, 3L));

private HeapAnalyzer heapAnalyzer;

@Before
public void setUp() {
heapAnalyzer = new HeapAnalyzer(NO_EXCLUDED_REFS);
}

@Test
public void ensureUniqueRoots() {
Snapshot snapshot = createSnapshot(DUP_ROOTS);

heapAnalyzer.deduplicateGcRoots(snapshot);

Collection<RootObj> uniqueRoots = snapshot.getGCRoots();
assertThat(uniqueRoots).hasSize(4);

List<Long> rootIds = new ArrayList<>();
for (RootObj root : uniqueRoots) {
rootIds.add(root.getId());
}
Collections.sort(rootIds);

// 3 appears twice because even though two RootObjs have the same id, they're different types.
assertThat(rootIds).containsExactly(3L, 3L, 5L, 6L);
}

private Snapshot createSnapshot(List<RootObj> gcRoots) {
Snapshot snapshot = new Snapshot(null);
for (RootObj root : gcRoots) {
snapshot.addRoot(root);
}
return snapshot;
}
}
