diff --git a/changelog.txt b/changelog.txt index 25aa83d76c3..74a3b379c9e 100644 --- a/changelog.txt +++ b/changelog.txt @@ -28,6 +28,8 @@ The Nullness Checker now treats `System.getProperty()` soundly. Use Add support for class qualifier parameters. See "Class qualifier parameters" section in the manual. +The Dataflow Framework now also supports backward analysis. + Implementation details: Changed the types of some fields and methods from array to List: @@ -37,6 +39,13 @@ Changed the types of some fields and methods from array to List: * QualifierDefaults.STANDARD_UNCHECKED_DEFAULTS_TOP * QualifierDefaults.STANDARD_UNCHECKED_DEFAULTS_BOTTOM +Dataflow Framework: Analysis is now an interface, added AbstractAnalysis, +ForwardAnalysis, ForwardTransferFunction, ForwardAnalysisImpl, +BackwardAnalysis, BackwardTransferFunction, BackwardAnalysisImpl. +Existing code is easy to adapt: + `extends Analysis` => `extends ForwardAnalysisImpl` + `implements TransferFunction` => `implements ForwardTransferFunction` + In AbstractQualifierPolymorphism, use AnnotationMirrors instead of sets of annotation mirrors. 
diff --git a/dataflow/manual/content.tex b/dataflow/manual/content.tex index f40c3771186..c34c8bccd84 100644 --- a/dataflow/manual/content.tex +++ b/dataflow/manual/content.tex @@ -378,7 +378,13 @@ \subsubsubsection{TransferFunction} package org.checkerframework.dataflow.analysis; interface TransferFunction, S extends Store> - extends NodeVisitor, TransferInput> + extends NodeVisitor, TransferInput> +interface ForwardTransferFunction, + S extends Store> + extends TransferFunction +interface BackwardTransferFunction, + S extends Store> + extends TransferFunction \end{verbatim} The Checker Framework defines a derived class of TransferFunction to @@ -392,7 +398,7 @@ \subsubsubsection{TransferFunction} S extends CFAbstractStore, T extends CFAbstractTransfer> extends AbstractNodeVisitor, TransferInput> - implements TransferFunction + implements ForwardTransferFunction class CFTransfer extends CFAbstractTransfer \end{verbatim} @@ -421,14 +427,26 @@ \subsubsection{Analysis} \label{sec:analysis_classes} An Analysis performs iterative dataflow analysis over a control flow -graph using a given transfer function. Currently only forward +graph using a given transfer function. Both forward and backward analyses are supported. 
\begin{verbatim} package org.checkerframework.dataflow.analysis; -class Analysis, +interface Analysis, + S extends Store, + T extends TransferFunction> +abstract class AbstractAnalysis, + S extends Store, + T extends TransferFunction> + implements Analysis +interface ForwardAnalysis, S extends Store, - T extends TransferFunction> + T extends ForwardTransferFunction> + extends Analysis +interface BackwardAnalysis, + S extends Store, + T extends BackwardTransferFunction> + extends Analysis \end{verbatim} The Checker Framework defines a derived class of Analysis for use as @@ -441,7 +459,7 @@ \subsubsection{Analysis} abstract class CFAbstractAnalysis, S extends CFAbstractStore, T extends CFAbstractTransfer> - extends Analysis + extends ForwardAnalysisImpl class CFAnalysis extends CFAbstractAnalysis \end{verbatim} @@ -1408,13 +1426,11 @@ \subsection{Overview} Roughly, a dataflow analysis in the framework works as follows. Given the abstract syntax tree of a method, the framework computes the corresponding control-flow graph as described in -\autoref{sec:cfg}. Then, a simple forward iterative algorithm is used -to compute a fix-point, by iteratively applying a set of transfer -functions to the nodes in the CFG\@. (For our initial application, -type-checking, we do not need to support backwards analyses; in the -future, we may wish to do so.) These transfer functions are specific -to the particular analysis and are used to approximate the runtime -behavior of different statements and expressions. +\autoref{sec:cfg}. Then, a simple forward or backward iterative +algorithm is used to compute a fix-point, by iteratively applying a +set of transfer functions to the nodes in the CFG\@. These transfer +functions are specific to the particular analysis and are used to +approximate the runtime behavior of different statements and expressions. 
An analysis result contains two parts: diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AbstractAnalysis.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AbstractAnalysis.java new file mode 100644 index 00000000000..36ee3346a60 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AbstractAnalysis.java @@ -0,0 +1,535 @@ +package org.checkerframework.dataflow.analysis; + +import com.sun.source.tree.ClassTree; +import com.sun.source.tree.MethodTree; +import com.sun.source.tree.Tree; +import java.util.Comparator; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Map; +import java.util.Objects; +import java.util.PriorityQueue; +import java.util.Set; +import javax.lang.model.element.Element; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; +import org.checkerframework.dataflow.cfg.ControlFlowGraph; +import org.checkerframework.dataflow.cfg.block.Block; +import org.checkerframework.dataflow.cfg.block.SpecialBlock; +import org.checkerframework.dataflow.cfg.node.AssignmentNode; +import org.checkerframework.dataflow.cfg.node.LocalVariableNode; +import org.checkerframework.dataflow.cfg.node.Node; +import org.checkerframework.javacutil.BugInCF; +import org.checkerframework.javacutil.ElementUtils; + +/** + * Implementation of common features for {@link BackwardAnalysisImpl} and {@link + * ForwardAnalysisImpl}. 
+ * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + * @param the transfer function type that is used to approximated runtime behavior + */ +public abstract class AbstractAnalysis< + V extends AbstractValue, S extends Store, T extends TransferFunction> + implements Analysis { + + /** The direction of this analysis. */ + protected final Direction direction; + + /** Is the analysis currently running? */ + protected boolean isRunning = false; + + /** The transfer function for regular nodes. */ + // TODO: make final. Currently, the transferFunction has a reference to the analysis, so it + // can't be created until the Analysis is initialized. + protected @Nullable T transferFunction; + + /** The current control flow graph to perform the analysis on. */ + protected @MonotonicNonNull ControlFlowGraph cfg; + + /** + * The transfer inputs of every basic block (assumed to be 'no information' if not present, + * inputs before blocks in forward analysis, after blocks in backward analysis). + */ + protected final IdentityHashMap> inputs; + + /** The worklist used for the fix-point iteration. */ + protected final Worklist worklist; + + /** Abstract values of nodes. */ + protected final IdentityHashMap nodeValues; + + /** Map from (effectively final) local variable elements to their abstract value. */ + protected final HashMap finalLocalValues; + + /** + * The node that is currently handled in the analysis (if it is running). The following + * invariant holds: + * + *
+     *   !isRunning ==> (currentNode == null)
+     * 
+ */ + protected @Nullable Node currentNode; + + /** + * The tree that is currently being looked at. The transfer function can set this tree to make + * sure that calls to {@code getValue} will not return information for this given tree. + */ + protected @Nullable Tree currentTree; + + /** The current transfer input when the analysis is running. */ + protected @Nullable TransferInput currentInput; + + /** + * Returns the tree that is currently being looked at. The transfer function can set this tree + * to make sure that calls to {@code getValue} will not return information for this given tree. + * + * @return the tree that is currently being looked at + */ + public @Nullable Tree getCurrentTree() { + return currentTree; + } + + /** + * Set the tree that is currently being looked at. + * + * @param currentTree the tree that should be currently looked at + */ + public void setCurrentTree(Tree currentTree) { + this.currentTree = currentTree; + } + + /** + * Implementation of common features for {@link BackwardAnalysisImpl} and {@link + * ForwardAnalysisImpl}. + * + * @param direction direction of the analysis + */ + protected AbstractAnalysis(Direction direction) { + this.direction = direction; + this.inputs = new IdentityHashMap<>(); + this.worklist = new Worklist(this.direction); + this.nodeValues = new IdentityHashMap<>(); + this.finalLocalValues = new HashMap<>(); + } + + /** Initialize the transfer inputs of every basic block before performing the analysis. */ + @RequiresNonNull("cfg") + protected abstract void initInitialInputs(); + + /** + * Propagate the stores in {@code currentInput} to the next block in the direction of analysis, + * according to the {@code flowRule}. 
+ * + * @param nextBlock the target block to propagate the stores to + * @param node the node of the target block + * @param currentInput the current transfer input + * @param flowRule the flow rule being used + * @param addToWorklistAgain whether the block should be added to {@link #worklist} again + */ + protected abstract void propagateStoresTo( + Block nextBlock, + Node node, + TransferInput currentInput, + Store.FlowRule flowRule, + boolean addToWorklistAgain); + + @Override + public boolean isRunning() { + return isRunning; + } + + @Override + public Direction getDirection() { + return this.direction; + } + + @Override + @SuppressWarnings("contracts.precondition.override.invalid") // implementation field + @RequiresNonNull("cfg") + public AnalysisResult getResult() { + if (isRunning) { + throw new BugInCF( + "AbstractAnalysis::getResult() shouldn't be called when the analysis is running."); + } + return new AnalysisResult<>( + nodeValues, + inputs, + cfg.getTreeLookup(), + cfg.getUnaryAssignNodeLookup(), + finalLocalValues); + } + + @Override + public @Nullable T getTransferFunction() { + return transferFunction; + } + + @Override + public @Nullable V getValue(Node n) { + if (isRunning) { + // we don't have a org.checkerframework.dataflow fact about the current node yet + if (currentNode == null + || currentNode == n + || (currentTree != null && currentTree == n.getTree())) { + return null; + } + // check that 'n' is a subnode of 'node'. Check immediate operands + // first for efficiency. + assert !n.isLValue() : "Did not expect an lvalue, but got " + n; + if (currentNode == n + || (!currentNode.getOperands().contains(n) + && !currentNode.getTransitiveOperands().contains(n))) { + return null; + } + // fall through when the current node is not 'n', and 'n' is not a subnode. + } + return nodeValues.get(n); + } + + /** + * Returns all current node values. 
+ * + * @return {@link #nodeValues} + */ + public IdentityHashMap getNodeValues() { + return nodeValues; + } + + /** + * Set all current node values to the given map. + * + * @param in the current node values + */ + /*package-private*/ void setNodeValues(IdentityHashMap in) { + assert !isRunning; + nodeValues.clear(); + nodeValues.putAll(in); + } + + @Override + @SuppressWarnings("contracts.precondition.override.invalid") // implementation field + @RequiresNonNull("cfg") + public @Nullable S getRegularExitStore() { + SpecialBlock regularExitBlock = cfg.getRegularExitBlock(); + if (inputs.containsKey(regularExitBlock)) { + return inputs.get(regularExitBlock).getRegularStore(); + } else { + return null; + } + } + + @Override + @SuppressWarnings("contracts.precondition.override.invalid") // implementation field + @RequiresNonNull("cfg") + public @Nullable S getExceptionalExitStore() { + SpecialBlock exceptionalExitBlock = cfg.getExceptionalExitBlock(); + if (inputs.containsKey(exceptionalExitBlock)) { + S exceptionalExitStore = inputs.get(exceptionalExitBlock).getRegularStore(); + return exceptionalExitStore; + } else { + return null; + } + } + + /** + * Get the set of {@link Node}s for a given {@link Tree}. Returns null for trees that don't + * produce a value. + * + * @param t the given tree + * @return the set of corresponding nodes to the given tree + */ + public @Nullable Set getNodesForTree(Tree t) { + if (cfg == null) { + return null; + } + return cfg.getNodesCorrespondingToTree(t); + } + + /** + * Return the abstract value for {@link Tree} {@code t}, or {@code null} if no information is + * available. Note that if the analysis has not finished yet, this value might not represent the + * final value for this node. 
+ * + * @param t the given tree + * @return the abstract value for the given tree + */ + public @Nullable V getValue(Tree t) { + // we don't have a org.checkerframework.dataflow fact about the current node yet + if (t == currentTree) { + return null; + } + Set nodesCorrespondingToTree = getNodesForTree(t); + if (nodesCorrespondingToTree == null) { + return null; + } + V merged = null; + for (Node aNode : nodesCorrespondingToTree) { + if (aNode.isLValue()) { + return null; + } + V v = getValue(aNode); + if (merged == null) { + merged = v; + } else if (v != null) { + merged = merged.leastUpperBound(v); + } + } + return merged; + } + + /** + * Get the {@link MethodTree} of the current CFG if the argument {@link Tree} maps to a {@link + * Node} in the CFG or {@code null} otherwise. + * + * @param t the given tree + * @return the contained method tree of the given tree + */ + public @Nullable MethodTree getContainingMethod(Tree t) { + if (cfg == null) { + return null; + } + return cfg.getContainingMethod(t); + } + + /** + * Get the {@link ClassTree} of the current CFG if the argument {@link Tree} maps to a {@link + * Node} in the CFG or {@code null} otherwise. + * + * @param t the given tree + * @return the contained class tree of the given tree + */ + public @Nullable ClassTree getContainingClass(Tree t) { + if (cfg == null) { + return null; + } + return cfg.getContainingClass(t); + } + + /** + * Call the transfer function for node {@code node}, and set that node as current node first. + * This method requires a {@code transferInput} that the method can modify. 
+ * + * @param node the given node + * @param transferInput the transfer input + * @return the output of the transfer function + */ + protected TransferResult callTransferFunction( + Node node, TransferInput transferInput) { + assert transferFunction != null : "@AssumeAssertion(nullness): invariant"; + if (node.isLValue()) { + // TODO: should the default behavior return a regular transfer result, a conditional + // transfer result (depending on store.hasTwoStores()), or is the following correct? + return new RegularTransferResult<>(null, transferInput.getRegularStore()); + } + transferInput.node = node; + currentNode = node; + TransferResult transferResult = node.accept(transferFunction, transferInput); + currentNode = null; + if (node instanceof AssignmentNode) { + // store the flow-refined value for effectively final local variables + AssignmentNode assignment = (AssignmentNode) node; + Node lhst = assignment.getTarget(); + if (lhst instanceof LocalVariableNode) { + LocalVariableNode lhs = (LocalVariableNode) lhst; + Element elem = lhs.getElement(); + if (ElementUtils.isEffectivelyFinal(elem)) { + V resval = transferResult.getResultValue(); + if (resval != null) { + finalLocalValues.put(elem, resval); + } + } + } + } + return transferResult; + } + + /** + * Initialize the analysis with a new control flow graph. + * + * @param cfg the control flow graph to use + */ + protected final void init(ControlFlowGraph cfg) { + initFields(cfg); + initInitialInputs(); + } + + /** + * Initialize class fields based on a given control flow graph. Sub-class may override this + * method to initialize customized fields. + * + * @param cfg a given control flow graph + */ + @EnsuresNonNull("this.cfg") + protected void initFields(ControlFlowGraph cfg) { + inputs.clear(); + nodeValues.clear(); + finalLocalValues.clear(); + this.cfg = cfg; + } + + /** + * Updates the value of node {@code node} to the value of the {@code transferResult}. 
Returns + * true if the node's value changed, or a store was updated. + * + * @param node the node to update + * @param transferResult the transfer result being updated + * @return true if the node's value changed, or a store was updated + */ + protected boolean updateNodeValues(Node node, TransferResult transferResult) { + V newVal = transferResult.getResultValue(); + boolean nodeValueChanged = false; + if (newVal != null) { + V oldVal = nodeValues.get(node); + nodeValues.put(node, newVal); + nodeValueChanged = !Objects.equals(oldVal, newVal); + } + return nodeValueChanged || transferResult.storeChanged(); + } + + /** + * Read the store for a particular basic block from a map of stores (or {@code null} if none + * exists yet). + * + * @param stores a map of stores + * @param b the target block + * @param method return type should be a subtype of {@link Store} + * @return the store for the target block + */ + protected static @Nullable S readFromStore(Map stores, Block b) { + return stores.get(b); + } + + /** + * Add a basic block to {@link #worklist}. If {@code b} is already present, the method does + * nothing. + * + * @param b the block to add to {@link #worklist} + */ + protected void addToWorklist(Block b) { + // TODO: use a more efficient way to check if b is already present + if (!worklist.contains(b)) { + worklist.add(b); + } + } + + /** + * A worklist is a priority queue of blocks in which the order is given by depth-first ordering + * to place non-loop predecessors ahead of successors. + */ + protected static class Worklist { + + /** Map all blocks in the CFG to their depth-first order. */ + protected final IdentityHashMap depthFirstOrder; + + /** + * Comparators to allow priority queue to order blocks by their depth-first order, using by + * forward analysis. 
+ */ + public class ForwardDFOComparator implements Comparator { + @SuppressWarnings("unboxing.of.nullable") + @Override + public int compare(Block b1, Block b2) { + return depthFirstOrder.get(b1) - depthFirstOrder.get(b2); + } + } + + /** + * Comparators to allow priority queue to order blocks by their depth-first order, using by + * backward analysis. + */ + public class BackwardDFOComparator implements Comparator { + @SuppressWarnings("unboxing.of.nullable") + @Override + public int compare(Block b1, Block b2) { + return depthFirstOrder.get(b2) - depthFirstOrder.get(b1); + } + } + + /** The backing priority queue. */ + protected final PriorityQueue queue; + + /** + * Create a Worklist. + * + * @param direction the direction (forward or backward) + */ + public Worklist(Direction direction) { + depthFirstOrder = new IdentityHashMap<>(); + + if (direction == Direction.FORWARD) { + queue = new PriorityQueue<>(11, new ForwardDFOComparator()); + } else if (direction == Direction.BACKWARD) { + queue = new PriorityQueue<>(11, new BackwardDFOComparator()); + } else { + throw new BugInCF("Unexpected Direction meet: " + direction.name()); + } + } + + /** + * Process the control flow graph, add the blocks to {@link #depthFirstOrder}. + * + * @param cfg the control flow graph to process + */ + public void process(ControlFlowGraph cfg) { + depthFirstOrder.clear(); + int count = 1; + for (Block b : cfg.getDepthFirstOrderedBlocks()) { + depthFirstOrder.put(b, count++); + } + + queue.clear(); + } + + /** + * See {@link PriorityQueue#isEmpty}. + * + * @see PriorityQueue#isEmpty + * @return true if {@link #queue} is empty else false + */ + @EnsuresNonNullIf(result = false, expression = "poll()") + @SuppressWarnings("nullness:contracts.conditional.postcondition.not.satisfied") // forwarded + public boolean isEmpty() { + return queue.isEmpty(); + } + + /** + * Check if {@link #queue} contains the block which is passed as the argument. 
+ * + * @param block the given block to check + * @return true if {@link #queue} contains the given block + */ + public boolean contains(Block block) { + return queue.contains(block); + } + + /** + * Add the given block to {@link #queue}. + * + * @param block the block to add to {@link #queue} + */ + public void add(Block block) { + queue.add(block); + } + + /** + * See {@link PriorityQueue#poll}. + * + * @see PriorityQueue#poll + * @return the head of {@link #queue} + */ + public @Nullable Block poll() { + return queue.poll(); + } + + @Override + public String toString() { + return "Worklist(" + queue + ")"; + } + } +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/Analysis.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/Analysis.java index c0287fd879d..476c61a75cd 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/Analysis.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/Analysis.java @@ -1,898 +1,120 @@ package org.checkerframework.dataflow.analysis; -import com.sun.source.tree.ClassTree; -import com.sun.source.tree.LambdaExpressionTree; -import com.sun.source.tree.MethodTree; -import com.sun.source.tree.Tree; -import com.sun.source.tree.VariableTree; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; import java.util.IdentityHashMap; -import java.util.List; import java.util.Map; -import java.util.Objects; -import java.util.PriorityQueue; -import java.util.Set; -import javax.lang.model.element.Element; -import javax.lang.model.type.TypeMirror; -import org.checkerframework.checker.nullness.qual.EnsuresNonNull; -import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; -import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.Nullable; -import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.dataflow.cfg.ControlFlowGraph; 
-import org.checkerframework.dataflow.cfg.UnderlyingAST; -import org.checkerframework.dataflow.cfg.UnderlyingAST.CFGLambda; -import org.checkerframework.dataflow.cfg.UnderlyingAST.CFGMethod; -import org.checkerframework.dataflow.cfg.UnderlyingAST.Kind; import org.checkerframework.dataflow.cfg.block.Block; -import org.checkerframework.dataflow.cfg.block.ConditionalBlock; -import org.checkerframework.dataflow.cfg.block.ExceptionBlock; -import org.checkerframework.dataflow.cfg.block.RegularBlock; -import org.checkerframework.dataflow.cfg.block.SpecialBlock; -import org.checkerframework.dataflow.cfg.node.AssignmentNode; -import org.checkerframework.dataflow.cfg.node.LocalVariableNode; import org.checkerframework.dataflow.cfg.node.Node; -import org.checkerframework.dataflow.cfg.node.ReturnNode; -import org.checkerframework.javacutil.ElementUtils; -import org.checkerframework.javacutil.Pair; /** - * An implementation of an iterative algorithm to solve a org.checkerframework.dataflow problem, - * given a control flow graph and a transfer function. + * This interface defines a dataflow analysis, given a control flow graph and a transfer function. A + * dataflow analysis has a direction, either forward or backward. The direction of corresponding + * transfer function is consistent with the analysis, i.e. a forward analysis has a forward transfer + * function, and a backward analysis has a backward transfer function. * * @param the abstract value type to be tracked by the analysis * @param the store type used in the analysis * @param the transfer function type that is used to approximated runtime behavior */ -public class Analysis< +public interface Analysis< V extends AbstractValue, S extends Store, T extends TransferFunction> { - /** Is the analysis currently running? */ - protected boolean isRunning = false; - - /** The transfer function for regular nodes. */ - // TODO: make final. 
Currently, the transferFunction has a reference to the analysis, so it - // can't be created until the Analysis is initialized. - protected @Nullable T transferFunction; - - /** The current control flow graph to perform the analysis on. */ - protected @MonotonicNonNull ControlFlowGraph cfg; - - /** Then stores before every basic block (assumed to be 'no information' if not present). */ - protected final IdentityHashMap thenStores; - - /** Else stores before every basic block (assumed to be 'no information' if not present). */ - protected final IdentityHashMap elseStores; - - /** - * Number of times every block has been analyzed since the last time widening was applied. Null, - * if maxCountBeforeWidening is -1 which implies widening isn't used for this analysis. - */ - protected final @Nullable IdentityHashMap blockCount; - - /** - * Number of times a block can be analyzed before widening. -1 implies that widening shouldn't - * be used. - */ - protected final int maxCountBeforeWidening; - - /** - * The transfer inputs before every basic block (assumed to be 'no information' if not present). - */ - protected final IdentityHashMap> inputs; - - /** The stores after every return statement. */ - protected final IdentityHashMap> storesAtReturnStatements; - - /** The worklist used for the fix-point iteration. */ - protected final Worklist worklist; - - /** Abstract values of nodes. */ - protected final IdentityHashMap nodeValues; - - /** Map from (effectively final) local variable elements to their abstract value. */ - public final HashMap finalLocalValues; - - /** - * The node that is currently handled in the analysis (if it is running). The following - * invariant holds: - * - *
-     *   !isRunning ⇒ (currentNode == null)
-     * 
- */ - protected @Nullable Node currentNode; - - /** - * The tree that is currently being looked at. The transfer function can set this tree to make - * sure that calls to {@code getValue} will not return information for this given tree. - */ - protected @Nullable Tree currentTree; - - /** The current transfer input when the analysis is running. */ - protected @Nullable TransferInput currentInput; - - /** The tree that is currently being looked at. */ - public @Nullable Tree getCurrentTree() { - return currentTree; - } - - public void setCurrentTree(Tree currentTree) { - this.currentTree = currentTree; - } - - // `@code`, not `@link`, because dataflow module doesn't depend on framework module. - /** - * Construct an object that can perform a org.checkerframework.dataflow analysis over a control - * flow graph. The transfer function is set by the subclass, e.g., {@code - * org.checkerframework.framework.flow.CFAbstractAnalysis}, later. - */ - public Analysis() { - this(null, -1); - } - - // `@code`, not `@link`, because dataflow module doesn't depend on framework moduel. - /** - * Construct an object that can perform a org.checkerframework.dataflow analysis over a control - * flow graph. The transfer function is set by the subclass, e.g., {@code - * org.checkerframework.framework.flow.CFAbstractAnalysis}, later. - * - * @param maxCountBeforeWidening number of times a block can be analyzed before widening - */ - public Analysis(int maxCountBeforeWidening) { - this(null, maxCountBeforeWidening); - } - - /** - * Construct an object that can perform a org.checkerframework.dataflow analysis over a control - * flow graph, given a transfer function. - * - * @param transfer transfer function - */ - public Analysis(T transfer) { - this(transfer, -1); + /** The direction of an analysis instance. */ + enum Direction { + /** The forward direction. */ + FORWARD, + /** The backward direction. 
*/ + BACKWARD } /** - * Construct an object that can perform a org.checkerframework.dataflow analysis over a control - * flow graph, given a transfer function. + * Get the direction of this analysis. * - * @param transfer transfer function - * @param maxCountBeforeWidening number of times a block can be analyzed before widening + * @return the direction of this analysis */ - public Analysis(@Nullable T transfer, int maxCountBeforeWidening) { - this.transferFunction = transfer; - this.maxCountBeforeWidening = maxCountBeforeWidening; - this.thenStores = new IdentityHashMap<>(); - this.elseStores = new IdentityHashMap<>(); - this.blockCount = maxCountBeforeWidening == -1 ? null : new IdentityHashMap<>(); - this.inputs = new IdentityHashMap<>(); - this.storesAtReturnStatements = new IdentityHashMap<>(); - this.worklist = new Worklist(); - this.nodeValues = new IdentityHashMap<>(); - this.finalLocalValues = new HashMap<>(); - } + Direction getDirection(); /** - * The current transfer function. + * Is the analysis currently running? * - * @return {@link #transferFunction} + * @return true if the analysis is running currently, else false */ - public @Nullable T getTransferFunction() { - return transferFunction; - } + boolean isRunning(); /** * Perform the actual analysis. * - * @param cfg the control flow graph used to perform analysis + * @param cfg the control flow graph */ - public void performAnalysis(ControlFlowGraph cfg) { - assert !isRunning; - isRunning = true; - - try { - init(cfg); - - while (!worklist.isEmpty()) { - Block b = worklist.poll(); - performAnalysisBlock(b); - } - } finally { - assert isRunning; - // In case performAnalysisBlock crashed, reset isRunning to false. - isRunning = false; - } - } - - /** Perform the actual analysis on one block. 
*/ - protected void performAnalysisBlock(Block b) { - switch (b.getType()) { - case REGULAR_BLOCK: - { - RegularBlock rb = (RegularBlock) b; - - // apply transfer function to contents - TransferInput inputBefore = getInputBefore(rb); - assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; - currentInput = inputBefore.copy(); - TransferResult transferResult = null; - Node lastNode = null; - boolean addToWorklistAgain = false; - for (Node n : rb.getContents()) { - assert currentInput != null : "@AssumeAssertion(nullness): invariant"; - transferResult = callTransferFunction(n, currentInput); - addToWorklistAgain |= updateNodeValues(n, transferResult); - currentInput = new TransferInput<>(n, this, transferResult); - lastNode = n; - } - assert currentInput != null : "@AssumeAssertion(nullness): invariant"; - // loop will run at least once, making transferResult non-null - - // propagate store to successors - Block succ = rb.getSuccessor(); - assert succ != null - : "@AssumeAssertion(nullness): regular basic block without non-exceptional successor unexpected"; - propagateStoresTo( - succ, lastNode, currentInput, rb.getFlowRule(), addToWorklistAgain); - break; - } - - case EXCEPTION_BLOCK: - { - ExceptionBlock eb = (ExceptionBlock) b; - - // apply transfer function to content - TransferInput inputBefore = getInputBefore(eb); - assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; - currentInput = inputBefore.copy(); - Node node = eb.getNode(); - TransferResult transferResult = callTransferFunction(node, currentInput); - boolean addToWorklistAgain = updateNodeValues(node, transferResult); - - // propagate store to successor - Block succ = eb.getSuccessor(); - if (succ != null) { - currentInput = new TransferInput<>(node, this, transferResult); - // TODO? Variable wasn't used. 
- // Store.FlowRule storeFlow = eb.getFlowRule(); - propagateStoresTo( - succ, node, currentInput, eb.getFlowRule(), addToWorklistAgain); - } - - // propagate store to exceptional successors - for (Map.Entry> e : - eb.getExceptionalSuccessors().entrySet()) { - TypeMirror cause = e.getKey(); - S exceptionalStore = transferResult.getExceptionalStore(cause); - if (exceptionalStore != null) { - for (Block exceptionSucc : e.getValue()) { - addStoreBefore( - exceptionSucc, - node, - exceptionalStore, - Store.Kind.BOTH, - addToWorklistAgain); - } - } else { - for (Block exceptionSucc : e.getValue()) { - addStoreBefore( - exceptionSucc, - node, - inputBefore.copy().getRegularStore(), - Store.Kind.BOTH, - addToWorklistAgain); - } - } - } - break; - } - - case CONDITIONAL_BLOCK: - { - ConditionalBlock cb = (ConditionalBlock) b; - - // get store before - TransferInput inputBefore = getInputBefore(cb); - assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; - TransferInput input = inputBefore.copy(); - - // propagate store to successor - Block thenSucc = cb.getThenSuccessor(); - Block elseSucc = cb.getElseSuccessor(); - - propagateStoresTo(thenSucc, null, input, cb.getThenFlowRule(), false); - propagateStoresTo(elseSucc, null, input, cb.getElseFlowRule(), false); - break; - } - - case SPECIAL_BLOCK: - { - // special basic blocks are empty and cannot throw exceptions, - // thus there is no need to perform any analysis. - SpecialBlock sb = (SpecialBlock) b; - Block succ = sb.getSuccessor(); - if (succ != null) { - TransferInput input = getInputBefore(b); - assert input != null : "@AssumeAssertion(nullness): invariant"; - propagateStoresTo(succ, null, input, sb.getFlowRule(), false); - } - break; - } - - default: - assert false; - break; - } - } + void performAnalysis(ControlFlowGraph cfg); /** - * Propagate the stores in currentInput to the successor block, succ, according to the flowRule. 
- */ - protected void propagateStoresTo( - Block succ, - @Nullable Node node, - TransferInput currentInput, - Store.FlowRule flowRule, - boolean addToWorklistAgain) { - switch (flowRule) { - case EACH_TO_EACH: - if (currentInput.containsTwoStores()) { - addStoreBefore( - succ, - node, - currentInput.getThenStore(), - Store.Kind.THEN, - addToWorklistAgain); - addStoreBefore( - succ, - node, - currentInput.getElseStore(), - Store.Kind.ELSE, - addToWorklistAgain); - } else { - addStoreBefore( - succ, - node, - currentInput.getRegularStore(), - Store.Kind.BOTH, - addToWorklistAgain); - } - break; - case THEN_TO_BOTH: - addStoreBefore( - succ, - node, - currentInput.getThenStore(), - Store.Kind.BOTH, - addToWorklistAgain); - break; - case ELSE_TO_BOTH: - addStoreBefore( - succ, - node, - currentInput.getElseStore(), - Store.Kind.BOTH, - addToWorklistAgain); - break; - case THEN_TO_THEN: - addStoreBefore( - succ, - node, - currentInput.getThenStore(), - Store.Kind.THEN, - addToWorklistAgain); - break; - case ELSE_TO_ELSE: - addStoreBefore( - succ, - node, - currentInput.getElseStore(), - Store.Kind.ELSE, - addToWorklistAgain); - break; - } - } - - /** - * Updates the value of node {@code node} to the value of the {@code transferResult}. Returns - * true if the node's value changed, or a store was updated. - * - * @param node a node - * @param transferResult the new transfer result to use as {@code node}'s value - * @return true if the node's value changed, or a store was updated - */ - protected boolean updateNodeValues(Node node, TransferResult transferResult) { - V newVal = transferResult.getResultValue(); - boolean nodeValueChanged = false; - - if (newVal != null) { - V oldVal = nodeValues.get(node); - nodeValues.put(node, newVal); - nodeValueChanged = !Objects.equals(oldVal, newVal); - } - - return nodeValueChanged || transferResult.storeChanged(); - } - - /** - * Call the transfer function for node {@code node}, and set that node as current node first. 
+ * Perform the actual analysis on one block. * - * @param node a node - * @param store the input of a transfer function - * @return the transfer result for the node + * @param b the block to analyze */ - protected TransferResult callTransferFunction(Node node, TransferInput store) { - assert transferFunction != null : "@AssumeAssertion(nullness): invariant"; - if (node.isLValue()) { - // TODO: should the default behavior be to return either a regular - // transfer result or a conditional transfer result (depending on - // store.hasTwoStores()), or is the following correct? - return new RegularTransferResult<>(null, store.getRegularStore()); - } - store.node = node; - currentNode = node; - TransferResult transferResult = node.accept(transferFunction, store); - currentNode = null; - if (node instanceof ReturnNode) { - // save a copy of the store to later check if some property held at - // a given return statement - storesAtReturnStatements.put((ReturnNode) node, transferResult); - } - if (node instanceof AssignmentNode) { - // store the flow-refined value for effectively final local variables - AssignmentNode assignment = (AssignmentNode) node; - Node lhst = assignment.getTarget(); - if (lhst instanceof LocalVariableNode) { - LocalVariableNode lhs = (LocalVariableNode) lhst; - Element elem = lhs.getElement(); - if (ElementUtils.isEffectivelyFinal(elem)) { - V resval = transferResult.getResultValue(); - if (resval != null) { - finalLocalValues.put(elem, resval); - } - } - } - } - return transferResult; - } + void performAnalysisBlock(Block b); /** - * Initialize the analysis with a new control flow graph. + * Runs the analysis again within the block of {@code node} and returns the store at the + * location of {@code node}. If {@code before} is true, then the store immediately before the + * {@link Node} {@code node} is returned. Otherwise, the store immediately after {@code node} is + * returned. If {@code analysisCaches} is not null, this method uses a cache. 
{@code + * analysisCaches} is a map of a block of node to the cached analysis result. If the cache for + * {@code transferInput} is not in {@code analysisCaches}, this method creates new cache and + * stores it in {@code analysisCaches}. The cache is a map of nodes to the analysis results of + * the nodes. * - * @param cfg the control flow graph to use - */ - @EnsuresNonNull("this.cfg") - protected void init(ControlFlowGraph cfg) { - thenStores.clear(); - elseStores.clear(); - if (blockCount != null) { - blockCount.clear(); - } - inputs.clear(); - storesAtReturnStatements.clear(); - nodeValues.clear(); - finalLocalValues.clear(); - - this.cfg = cfg; - worklist.process(cfg); - worklist.add(cfg.getEntryBlock()); - - List parameters = null; - UnderlyingAST underlyingAST = cfg.getUnderlyingAST(); - if (underlyingAST.getKind() == Kind.METHOD) { - MethodTree tree = ((CFGMethod) underlyingAST).getMethod(); - parameters = new ArrayList<>(); - for (VariableTree p : tree.getParameters()) { - LocalVariableNode var = new LocalVariableNode(p); - parameters.add(var); - // TODO: document that LocalVariableNode has no block that it - // belongs to - } - } else if (underlyingAST.getKind() == Kind.LAMBDA) { - LambdaExpressionTree lambda = ((CFGLambda) underlyingAST).getLambdaTree(); - parameters = new ArrayList<>(); - for (VariableTree p : lambda.getParameters()) { - LocalVariableNode var = new LocalVariableNode(p); - parameters.add(var); - // TODO: document that LocalVariableNode has no block that it - // belongs to - } - - } else { - // nothing to do - } - assert transferFunction != null : "@AssumeAssertion(nullness): invariant"; - S initialStore = transferFunction.initialStore(underlyingAST, parameters); - Block entry = cfg.getEntryBlock(); - thenStores.put(entry, initialStore); - elseStores.put(entry, initialStore); - inputs.put(entry, new TransferInput<>(null, this, initialStore)); - } - - /** - * Add a basic block to the worklist. 
If {@code b} is already present, the method does nothing. + * @param node the node to analyze + * @param before the boolean value to indicate which store to return (if it is true, return the + * store immediately before {@code node}; otherwise, the store after {@code node} is + * returned) + * @param transferInput the transfer input of the block of this node + * @param nodeValues abstract values of nodes + * @param analysisCaches caches of analysis results + * @return the store before or after {@code node} (depends on the value of {@code before}) after + * running the analysis */ - protected void addToWorklist(Block b) { - // TODO: use a more efficient way to check if b is already present - if (!worklist.contains(b)) { - worklist.add(b); - } - } + S runAnalysisFor( + Node node, + boolean before, + TransferInput transferInput, + IdentityHashMap nodeValues, + Map, IdentityHashMap>> analysisCaches); /** - * Add a store before the basic block {@code b} by merging with the existing stores for that - * location. 
- */ - protected void addStoreBefore( - Block b, @Nullable Node node, S s, Store.Kind kind, boolean addBlockToWorklist) { - S thenStore = getStoreBefore(b, Store.Kind.THEN); - S elseStore = getStoreBefore(b, Store.Kind.ELSE); - boolean shouldWiden = false; - - if (blockCount != null) { - Integer count = blockCount.get(b); - if (count == null) { - count = 0; - } - shouldWiden = count >= maxCountBeforeWidening; - if (shouldWiden) { - blockCount.put(b, 0); - } else { - blockCount.put(b, count + 1); - } - } - - switch (kind) { - case THEN: - { - // Update the then store - S newThenStore = mergeStores(s, thenStore, shouldWiden); - if (!newThenStore.equals(thenStore)) { - thenStores.put(b, newThenStore); - if (elseStore != null) { - inputs.put(b, new TransferInput<>(node, this, newThenStore, elseStore)); - addBlockToWorklist = true; - } - } - break; - } - case ELSE: - { - // Update the else store - S newElseStore = mergeStores(s, elseStore, shouldWiden); - if (!newElseStore.equals(elseStore)) { - elseStores.put(b, newElseStore); - if (thenStore != null) { - inputs.put(b, new TransferInput<>(node, this, thenStore, newElseStore)); - addBlockToWorklist = true; - } - } - break; - } - case BOTH: - if (thenStore == elseStore) { - // Currently there is only one regular store - S newStore = mergeStores(s, thenStore, shouldWiden); - if (!newStore.equals(thenStore)) { - thenStores.put(b, newStore); - elseStores.put(b, newStore); - inputs.put(b, new TransferInput<>(node, this, newStore)); - addBlockToWorklist = true; - } - } else { - boolean storeChanged = false; - - S newThenStore = mergeStores(s, thenStore, shouldWiden); - if (!newThenStore.equals(thenStore)) { - thenStores.put(b, newThenStore); - storeChanged = true; - } - - S newElseStore = mergeStores(s, elseStore, shouldWiden); - if (!newElseStore.equals(elseStore)) { - elseStores.put(b, newElseStore); - storeChanged = true; - } - - if (storeChanged) { - inputs.put(b, new TransferInput<>(node, this, newThenStore, 
newElseStore)); - addBlockToWorklist = true; - } - } - } - - if (addBlockToWorklist) { - addToWorklist(b); - } - } - - /** Merge two stores, possibly widening the result. */ - private S mergeStores(S newStore, @Nullable S previousStore, boolean shouldWiden) { - if (previousStore == null) { - return newStore; - } else if (shouldWiden) { - return newStore.widenedUpperBound(previousStore); - } else { - return newStore.leastUpperBound(previousStore); - } - } - - /** - * A worklist is a priority queue of blocks in which the order is given by depth-first ordering - * to place non-loop predecessors ahead of successors. - */ - protected static class Worklist { - - /** Map all blocks in the CFG to their depth-first order. */ - protected final IdentityHashMap depthFirstOrder; - - /** Comparator to allow priority queue to order blocks by their depth-first order. */ - public class DFOComparator implements Comparator { - @SuppressWarnings("unboxing.of.nullable") - @Override - public int compare(Block b1, Block b2) { - return depthFirstOrder.get(b1) - depthFirstOrder.get(b2); - } - } - - /** The backing priority queue. */ - protected final PriorityQueue queue; - - public Worklist() { - depthFirstOrder = new IdentityHashMap<>(); - queue = new PriorityQueue<>(11, new DFOComparator()); - } - - public void process(ControlFlowGraph cfg) { - depthFirstOrder.clear(); - int count = 1; - for (Block b : cfg.getDepthFirstOrderedBlocks()) { - depthFirstOrder.put(b, count++); - } - - queue.clear(); - } - - /** - * See {@link PriorityQueue#isEmpty}. - * - * @see PriorityQueue#isEmpty - */ - @EnsuresNonNullIf(result = false, expression = "poll()") - @SuppressWarnings("nullness:contracts.conditional.postcondition.not.satisfied") // forwarded - public boolean isEmpty() { - return queue.isEmpty(); - } - - public boolean contains(Block block) { - return queue.contains(block); - } - - public void add(Block block) { - queue.add(block); - } - - /** - * See {@link PriorityQueue#poll}. 
- * - * @see PriorityQueue#poll - */ - public @Nullable Block poll() { - return queue.poll(); - } - - @Override - public String toString() { - return "Worklist(" + queue + ")"; - } - } - - /** - * Read the {@link TransferInput} for a particular basic block (or {@code null} if none exists - * yet). + * The result of running the analysis. This is only available once the analysis finished + * running. * - * @param b a basic block - * @return the transfer input for the basic block + * @return the result of running the analysis */ - public @Nullable TransferInput getInput(Block b) { - return getInputBefore(b); - } + AnalysisResult getResult(); /** - * Returns the transfer input corresponding to the location right before the basic block {@code - * b}. + * Get the transfer function of this analysis. * - * @param b a basic block - * @return the transfer input corresponding to the location right before the basic block {@code - * b} + * @return the transfer function of this analysis */ - protected @Nullable TransferInput getInputBefore(Block b) { - return inputs.get(b); - } + @Nullable T getTransferFunction(); /** - * Returns the store corresponding to the location right before the basic block {@code b}. + * Get the transfer input of a given {@link Block} b. * - * @return the store corresponding to the location right before the basic block {@code b} + * @param b a given Block + * @return the transfer input of this Block */ - protected @Nullable S getStoreBefore(Block b, Store.Kind kind) { - switch (kind) { - case THEN: - return readFromStore(thenStores, b); - case ELSE: - return readFromStore(elseStores, b); - default: - assert false; - return null; - } - } - - /** - * Read the {@link Store} for a particular basic block from a map of stores (or {@code null} if - * none exists yet). - */ - protected static @Nullable S readFromStore(Map stores, Block b) { - return stores.get(b); - } - - /** Is the analysis currently running? 
*/ - public boolean isRunning() { - return isRunning; - } + @Nullable TransferInput getInput(Block b); /** - * Returns the abstract value for node {@code n}, or {@code null} if no information is + * Returns the abstract value for {@link Node} {@code n}, or {@code null} if no information is * available. Note that if the analysis has not finished yet, this value might not represent the * final value for this node. * - * @param n a node + * @param n n a node * @return the abstract value for node {@code n}, or {@code null} if no information is available */ - public @Nullable V getValue(Node n) { - if (isRunning) { - // we do not yet have a org.checkerframework.dataflow fact about the current node - if (currentNode == null - || currentNode == n - || (currentTree != null && currentTree == n.getTree())) { - return null; - } - // check that 'n' is a subnode of 'node'. Check immediate operands - // first for efficiency. - assert !n.isLValue() : "Did not expect an lvalue, but got " + n; - if (currentNode == n - || (!currentNode.getOperands().contains(n) - && !currentNode.getTransitiveOperands().contains(n))) { - return null; - } - return nodeValues.get(n); - } - return nodeValues.get(n); - } - - /** - * Return all current node values. - * - * @return all current node values - */ - public IdentityHashMap getNodeValues() { - return nodeValues; - } - - /** - * Set all current node values to the given map. - * - * @param in the current node values - */ - /*package-private*/ void setNodeValues(IdentityHashMap in) { - assert !isRunning; - nodeValues.clear(); - nodeValues.putAll(in); - } - - /** - * Returns the abstract value for {@code t}, or {@code null} if no information is available. - * Note that if the analysis has not finished yet, this value might not represent the final - * value for this node. 
- * - * @param t a tree - * @return the abstract value for {@code t}, or {@code null} if no information is available - */ - public @Nullable V getValue(Tree t) { - // we do not yet have a org.checkerframework.dataflow fact about the current node - if (t == currentTree) { - return null; - } - Set nodesCorrespondingToTree = getNodesForTree(t); - if (nodesCorrespondingToTree == null) { - return null; - } - V merged = null; - for (Node aNode : nodesCorrespondingToTree) { - if (aNode.isLValue()) { - return null; - } - V a = getValue(aNode); - if (merged == null) { - merged = a; - } else if (a != null) { - merged = merged.leastUpperBound(a); - } - } - return merged; - } - - /** - * Get the set of {@link Node}s for a given {@link Tree}. Returns null for trees that don't - * produce a value. - */ - public @Nullable Set getNodesForTree(Tree t) { - if (cfg == null) { - return null; - } - Set nodes = cfg.getNodesCorrespondingToTree(t); - return nodes; - } - - /** - * Get the {@link MethodTree} of the current CFG if the argument {@link Tree} maps to a {@link - * Node} in the CFG or null otherwise. - */ - public @Nullable MethodTree getContainingMethod(Tree t) { - if (cfg == null) { - return null; - } - MethodTree mt = cfg.getContainingMethod(t); - return mt; - } - - /** - * Get the {@link ClassTree} of the current CFG if the argument {@link Tree} maps to a {@link - * Node} in the CFG or null otherwise. - */ - public @Nullable ClassTree getContainingClass(Tree t) { - if (cfg == null) { - return null; - } - ClassTree ct = cfg.getContainingClass(t); - return ct; - } - - /** - * The transfer results for each return node in the CFG. 
- * - * @return the transfer results for each return node in the CFG - */ - @RequiresNonNull("cfg") - public List>> getReturnStatementStores() { - assert cfg != null : "@AssumeAssertion(nullness): invariant"; - List>> result = new ArrayList<>(); - for (ReturnNode returnNode : cfg.getReturnNodes()) { - TransferResult store = storesAtReturnStatements.get(returnNode); - result.add(Pair.of(returnNode, store)); - } - return result; - } - - /** - * The result of running the analysis. This is only available once the analysis finished - * running. - * - * @return the result of running the analysis - */ - @RequiresNonNull("cfg") - public AnalysisResult getResult() { - assert !isRunning; - assert cfg != null : "@AssumeAssertion(nullness): invariant"; - return new AnalysisResult<>( - nodeValues, - inputs, - cfg.getTreeLookup(), - cfg.getUnaryAssignNodeLookup(), - finalLocalValues); - } + @Nullable V getValue(Node n); /** * Returns the regular exit store, or {@code null}, if there is no such store (because the @@ -901,32 +123,12 @@ public AnalysisResult getResult() { * @return the regular exit store, or {@code null}, if there is no such store (because the * method cannot exit through the regular exit block) */ - @RequiresNonNull("cfg") - public @Nullable S getRegularExitStore() { - assert cfg != null : "@AssumeAssertion(nullness): invariant"; - SpecialBlock regularExitBlock = cfg.getRegularExitBlock(); - if (inputs.containsKey(regularExitBlock)) { - S regularExitStore = inputs.get(regularExitBlock).getRegularStore(); - return regularExitStore; - } else { - return null; - } - } + @Nullable S getRegularExitStore(); /** * Returns the exceptional exit store. 
* * @return the exceptional exit store */ - @RequiresNonNull("cfg") - public @Nullable S getExceptionalExitStore() { - assert cfg != null : "@AssumeAssertion(nullness): invariant"; - SpecialBlock exceptionalExitBlock = cfg.getExceptionalExitBlock(); - if (inputs.containsKey(exceptionalExitBlock)) { - S exceptionalExitStore = inputs.get(exceptionalExitBlock).getRegularStore(); - return exceptionalExitStore; - } else { - return null; - } - } + @Nullable S getExceptionalExitStore(); } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AnalysisResult.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AnalysisResult.java index be99da90a88..e809776792b 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AnalysisResult.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/AnalysisResult.java @@ -4,6 +4,7 @@ import com.sun.source.tree.UnaryTree; import java.util.HashMap; import java.util.IdentityHashMap; +import java.util.List; import java.util.Map; import java.util.Set; import javax.lang.model.element.Element; @@ -127,7 +128,12 @@ public void combine(AnalysisResult other) { finalLocalValues.putAll(other.finalLocalValues); } - // Merge all entries from otherTreeLookup into treeLookup. Merge sets if already present. + /** + * Merge all entries from otherTreeLookup into treeLookup. Merge sets if already present. + * + * @param treeLookup a map from abstract syntax trees to sets of nodes + * @param otherTreeLookup another treeLookup that will be merged into {@code treeLookup} + */ private static void mergeTreeLookup( IdentityHashMap> treeLookup, IdentityHashMap> otherTreeLookup) { @@ -152,7 +158,8 @@ public HashMap getFinalLocalValues() { /** * Returns the abstract value for {@link Node} {@code n}, or {@code null} if no information is - * available. + * available. Note that if the analysis has not finished yet, this value might not represent the + * final value for this node. 
* * @param n a node * @return the abstract value for {@link Node} {@code n}, or {@code null} if no information is @@ -164,7 +171,8 @@ public HashMap getFinalLocalValues() { /** * Returns the abstract value for {@link Tree} {@code t}, or {@code null} if no information is - * available. + * available. Note that if the analysis has not finished yet, this value might not represent the + * final value for this node. * * @param t a tree * @return the abstract value for {@link Tree} {@code t}, or {@code null} if no information is @@ -205,6 +213,7 @@ public HashMap getFinalLocalValues() { * Callers of this method should always iterate through the returned set, possibly ignoring all * {@code Node}s they are not interested in. * + * @param tree a tree * @return the set of {@link Node}s for a given {@link Tree} */ public @Nullable Set getNodesForTree(Tree tree) { @@ -212,14 +221,14 @@ public HashMap getFinalLocalValues() { } /** - * Return the corresponding {@link AssignmentNode} for a given {@link UnaryTree}. + * Returns the corresponding {@link AssignmentNode} for a given {@link UnaryTree}. * * @param tree a unary tree * @return the corresponding assignment node */ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { if (!unaryAssignNodeLookup.containsKey(tree)) { - throw new Error(tree + " is not in unaryAssignNodeLookup"); + throw new BugInCF(tree + " is not in unaryAssignNodeLookup"); } return unaryAssignNodeLookup.get(tree); } @@ -227,6 +236,7 @@ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { /** * Returns the store immediately before a given {@link Tree}. * + * @param tree a tree * @return the store immediately before a given {@link Tree} */ public @Nullable S getStoreBefore(Tree tree) { @@ -249,15 +259,102 @@ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { /** * Returns the store immediately before a given {@link Node}. 
* + * @param node a node * @return the store immediately before a given {@link Node} */ public @Nullable S getStoreBefore(Node node) { return runAnalysisFor(node, true); } + /** + * Returns the regular store immediately before a given {@link Block}. + * + * @param block a block + * @return the store right before the given block + */ + public S getStoreBefore(Block block) { + TransferInput transferInput = stores.get(block); + assert transferInput != null + : "@AssumeAssertion(nullness): transferInput should be non-null"; + Analysis analysis = transferInput.analysis; + switch (analysis.getDirection()) { + case FORWARD: + return transferInput.getRegularStore(); + case BACKWARD: + Node firstNode; + switch (block.getType()) { + case REGULAR_BLOCK: + firstNode = ((RegularBlock) block).getContents().get(0); + break; + case EXCEPTION_BLOCK: + firstNode = ((ExceptionBlock) block).getNode(); + break; + default: + firstNode = null; + } + if (firstNode == null) { + // This block doesn't contains any node, return the store in the transfer input + return transferInput.getRegularStore(); + } + return analysis.runAnalysisFor( + firstNode, true, transferInput, nodeValues, analysisCaches); + default: + throw new BugInCF("Unknown direction: " + analysis.getDirection()); + } + } + + /** + * Returns the regular store immediately after a given block. 
+ * + * @param block a block + * @return the store after the given block + */ + public S getStoreAfter(Block block) { + TransferInput transferInput = stores.get(block); + assert transferInput != null + : "@AssumeAssertion(nullness): transferInput should be non-null"; + Analysis analysis = transferInput.analysis; + switch (analysis.getDirection()) { + case FORWARD: + Node lastNode = getLastNode(block); + if (lastNode == null) { + // This block doesn't contains any node, return the store in the transfer input + return transferInput.getRegularStore(); + } + return analysis.runAnalysisFor( + lastNode, false, transferInput, nodeValues, analysisCaches); + case BACKWARD: + return transferInput.getRegularStore(); + default: + throw new BugInCF("Unknown direction: " + analysis.getDirection()); + } + } + + /** + * Returns the last node of the given block, or {@code null} if none. + * + * @param block the block + * @return the last node of this block or {@code null} + */ + protected @Nullable Node getLastNode(Block block) { + switch (block.getType()) { + case REGULAR_BLOCK: + List blockContents = ((RegularBlock) block).getContents(); + return blockContents.get(blockContents.size() - 1); + case CONDITIONAL_BLOCK: + case SPECIAL_BLOCK: + return null; + case EXCEPTION_BLOCK: + return ((ExceptionBlock) block).getNode(); + default: + throw new BugInCF("Unrecognized block type: " + block.getType()); + } + } + /** * Returns the store immediately after a given {@link Tree}. * + * @param tree a tree * @return the store immediately after a given {@link Tree} */ public @Nullable S getStoreAfter(Tree tree) { @@ -280,6 +377,7 @@ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { /** * Returns the store immediately after a given {@link Node}. * + * @param node a node * @return the store immediately after a given {@link Node} */ public @Nullable S getStoreAfter(Node node) { @@ -293,6 +391,13 @@ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { * *

If the given {@link Node} cannot be reached (in the control flow graph), then {@code null} * is returned. + * + * @param node the node to analyze + * @param before the boolean value to indicate which store to return (if it is true, return the + * store immediately before {@code node}; otherwise, the store after {@code node} is + * returned) + * @return the store before or after {@code node} (depends on the value of {@code before}) after + * running the analysis */ protected @Nullable S runAnalysisFor(Node node, boolean before) { Block block = node.getBlock(); @@ -307,21 +412,24 @@ public AssignmentNode getAssignForUnaryTree(UnaryTree tree) { /** * Runs the analysis again within the block of {@code node} and returns the store at the * location of {@code node}. If {@code before} is true, then the store immediately before the - * {@link Node} {@code node} is returned. Otherwise, the store after {@code node} is returned. - * If {@code analysisCaches} is not null, this method uses a cache. {@code analysisCaches} is a - * map to a cache for analysis result from an input of the block of the node. If the cache for - * {@code transferInput} is not in {@code analysisCaches}, this method create new cache and - * store it in {@code analysisCaches}. The cache is a map from a node to the analysis result of - * the node. + * {@link Node} {@code node} is returned. Otherwise, the store immediately after {@code node} is + * returned. If {@code analysisCaches} is not null, this method uses a cache. {@code + * analysisCaches} is a map of a block of node to the cached analysis result. If the cache for + * {@code transferInput} is not in {@code analysisCaches}, this method creates new cache and + * stores it in {@code analysisCaches}. The cache is a map of nodes to the analysis results of + * the nodes. 
* * @param the abstract value type to be tracked by the analysis * @param the store type used in the analysis - * @param node a node - * @param before indicate before or after the node + * @param node the node to analyze + * @param before the boolean value to indicate which store to return (if it is true, return the + * store immediately before {@code node}; otherwise, the store after {@code node} is + * returned) * @param transferInput a transfer input * @param nodeValues {@link #nodeValues} * @param analysisCaches {@link #analysisCaches} - * @return store immediately before or after the given node + * @return the store before or after {@code node} (depends on the value of {@code before}) after + * running the analysis */ public static , S extends Store> S runAnalysisFor( Node node, @@ -329,87 +437,10 @@ public static , S extends Store> S runAnalysisFor( TransferInput transferInput, IdentityHashMap nodeValues, Map, IdentityHashMap>> analysisCaches) { - assert node != null; - Block block = node.getBlock(); - assert block != null : "@AssumeAssertion(nullness): invariant"; - assert transferInput != null; - Analysis analysis = transferInput.analysis; - Node oldCurrentNode = analysis.currentNode; - - // Prepare cache - IdentityHashMap> cache; - if (analysisCaches != null) { - cache = analysisCaches.get(transferInput); - if (cache == null) { - cache = new IdentityHashMap<>(); - analysisCaches.put(transferInput, cache); - } - } else { - cache = null; - } - - if (analysis.isRunning) { - assert analysis.currentInput != null : "@AssumeAssertion(nullness): invariant"; - return analysis.currentInput.getRegularStore(); - } - analysis.setNodeValues(nodeValues); - analysis.isRunning = true; - try { - switch (block.getType()) { - case REGULAR_BLOCK: - { - RegularBlock rb = (RegularBlock) block; - - // Apply transfer function to contents until we found the node we are - // looking for. 
- TransferInput store = transferInput; - TransferResult transferResult = null; - for (Node n : rb.getContents()) { - analysis.currentNode = n; - if (n == node && before) { - return store.getRegularStore(); - } - if (cache != null && cache.containsKey(n)) { - transferResult = cache.get(n); - } else { - // Copy the store not to change the state in the cache - transferResult = analysis.callTransferFunction(n, store.copy()); - if (cache != null) { - cache.put(n, transferResult); - } - } - if (n == node) { - return transferResult.getRegularStore(); - } - store = new TransferInput<>(n, analysis, transferResult); - } - // This point should never be reached. If the block of 'node' is - // 'block', then 'node' must be part of the contents of 'block'. - throw new BugInCF("Unexpected code"); - } - - case EXCEPTION_BLOCK: - { - ExceptionBlock eb = (ExceptionBlock) block; - - // apply transfer function to content - assert eb.getNode() == node; - if (before) { - return transferInput.getRegularStore(); - } - analysis.currentNode = node; - TransferResult transferResult = - analysis.callTransferFunction(node, transferInput); - return transferResult.getRegularStore(); - } - - default: - // Only regular blocks and exceptional blocks can hold nodes. 
- throw new BugInCF("Unexpected code"); - } - } finally { - analysis.currentNode = oldCurrentNode; - analysis.isRunning = false; + if (transferInput.analysis == null) { + throw new BugInCF("Analysis in transferInput cannot be null."); } + return transferInput.analysis.runAnalysisFor( + node, before, transferInput, nodeValues, analysisCaches); } } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysis.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysis.java new file mode 100644 index 00000000000..c59110d61f4 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysis.java @@ -0,0 +1,27 @@ +package org.checkerframework.dataflow.analysis; + +import org.checkerframework.checker.nullness.qual.Nullable; + +/** + * This interface defines a backward analysis, given a control flow graph and a backward transfer + * function. + * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + * @param the backward transfer function type that is used to approximate runtime behavior + */ +public interface BackwardAnalysis< + V extends AbstractValue, + S extends Store, + T extends BackwardTransferFunction> + extends Analysis { + + /** + * Get the output store at the entry block of a given control flow graph. For a backward + * analysis, the output store contains the analyzed flow information from the exit block to the + * entry block. 
+ * + * @return the output store at the entry block of a given control flow graph + */ + @Nullable S getEntryStore(); +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysisImpl.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysisImpl.java new file mode 100644 index 00000000000..888b0a801d7 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardAnalysisImpl.java @@ -0,0 +1,390 @@ +package org.checkerframework.dataflow.analysis; + +import java.util.IdentityHashMap; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; +import org.checkerframework.dataflow.analysis.Store.FlowRule; +import org.checkerframework.dataflow.cfg.ControlFlowGraph; +import org.checkerframework.dataflow.cfg.UnderlyingAST; +import org.checkerframework.dataflow.cfg.block.Block; +import org.checkerframework.dataflow.cfg.block.BlockImpl; +import org.checkerframework.dataflow.cfg.block.ConditionalBlock; +import org.checkerframework.dataflow.cfg.block.ExceptionBlock; +import org.checkerframework.dataflow.cfg.block.RegularBlock; +import org.checkerframework.dataflow.cfg.block.SpecialBlock; +import org.checkerframework.dataflow.cfg.block.SpecialBlock.SpecialBlockType; +import org.checkerframework.dataflow.cfg.node.Node; +import org.checkerframework.dataflow.cfg.node.ReturnNode; +import org.checkerframework.javacutil.BugInCF; + +/** + * An implementation of a backward analysis to solve a org.checkerframework.dataflow problem given a + * control flow graph and a backward transfer function. 
+ * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + * @param the transfer function type that is used to approximate runtime behavior + */ +public class BackwardAnalysisImpl< + V extends AbstractValue, + S extends Store, + T extends BackwardTransferFunction> + extends AbstractAnalysis implements BackwardAnalysis { + + // TODO: Add widening support like what the forward analysis does. + + /** Out stores after every basic block (assumed to be 'no information' if not present). */ + protected final IdentityHashMap outStores; + + /** + * Exception store of an exception block, propagated by exceptional successors of its exception + * block, and merged with the normal {@link TransferResult}. + */ + protected final IdentityHashMap exceptionStores; + + /** The store right before the entry block. */ + protected @Nullable S storeAtEntry; + + // `@code`, not `@link`, because dataflow module doesn't depend on framework module. + /** + * Construct an object that can perform a org.checkerframework.dataflow backward analysis over a + * control flow graph. The transfer function is set by the subclass, e.g., {@code + * org.checkerframework.framework.flow.CFAbstractAnalysis}, later. + */ + public BackwardAnalysisImpl() { + super(Direction.BACKWARD); + this.outStores = new IdentityHashMap<>(); + this.exceptionStores = new IdentityHashMap<>(); + this.storeAtEntry = null; + } + + /** + * Construct an object that can perform a org.checkerframework.dataflow backward analysis over a + * control flow graph given a transfer function. 
+ * + * @param transfer the transfer function + */ + public BackwardAnalysisImpl(@Nullable T transfer) { + this(); + this.transferFunction = transfer; + } + + @Override + public void performAnalysis(ControlFlowGraph cfg) { + if (isRunning) { + throw new BugInCF( + "performAnalysis() shouldn't be called when the analysis is running."); + } + isRunning = true; + try { + init(cfg); + while (!worklist.isEmpty()) { + Block b = worklist.poll(); + performAnalysisBlock(b); + } + } finally { + assert isRunning; + // In case performAnalysisBlock crashed, reset isRunning to false. + isRunning = false; + } + } + + @Override + public void performAnalysisBlock(Block b) { + switch (b.getType()) { + case REGULAR_BLOCK: + { + RegularBlock rb = (RegularBlock) b; + TransferInput inputAfter = getInput(rb); + assert inputAfter != null : "@AssumeAssertion(nullness): invariant"; + currentInput = inputAfter.copy(); + Node firstNode = null; + boolean addToWorklistAgain = false; + List nodeList = rb.getContents(); + ListIterator reverseIter = nodeList.listIterator(nodeList.size()); + while (reverseIter.hasPrevious()) { + Node node = reverseIter.previous(); + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + TransferResult transferResult = + callTransferFunction(node, currentInput); + addToWorklistAgain |= updateNodeValues(node, transferResult); + currentInput = new TransferInput<>(node, this, transferResult); + firstNode = node; + } + // Propagate store to predecessors + for (BlockImpl pred : rb.getPredecessors()) { + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + propagateStoresTo( + pred, + firstNode, + currentInput, + FlowRule.EACH_TO_EACH, + addToWorklistAgain); + } + break; + } + case EXCEPTION_BLOCK: + { + ExceptionBlock eb = (ExceptionBlock) b; + TransferInput inputAfter = getInput(eb); + assert inputAfter != null : "@AssumeAssertion(nullness): invariant"; + currentInput = inputAfter.copy(); + Node node = eb.getNode(); + TransferResult 
transferResult = callTransferFunction(node, currentInput); + boolean addToWorklistAgain = updateNodeValues(node, transferResult); + // Merge transferResult with exceptionStore if there exists one + S exceptionStore = exceptionStores.get(eb); + S mergedStore = + exceptionStore != null + ? transferResult + .getRegularStore() + .leastUpperBound(exceptionStore) + : transferResult.getRegularStore(); + for (BlockImpl pred : eb.getPredecessors()) { + addStoreAfter(pred, node, mergedStore, addToWorklistAgain); + } + break; + } + case CONDITIONAL_BLOCK: + { + ConditionalBlock cb = (ConditionalBlock) b; + TransferInput inputAfter = getInput(cb); + assert inputAfter != null : "@AssumeAssertion(nullness): invariant"; + TransferInput input = inputAfter.copy(); + for (BlockImpl pred : cb.getPredecessors()) { + propagateStoresTo(pred, null, input, FlowRule.EACH_TO_EACH, false); + } + break; + } + case SPECIAL_BLOCK: + { + // Special basic blocks are empty and cannot throw exceptions, + // thus there is no need to perform any analysis. 
+ SpecialBlock sb = (SpecialBlock) b; + final SpecialBlockType sType = sb.getSpecialType(); + if (sType == SpecialBlockType.ENTRY) { + // storage the store at entry + storeAtEntry = outStores.get(sb); + } else { + assert sType == SpecialBlockType.EXIT + || sType == SpecialBlockType.EXCEPTIONAL_EXIT; + TransferInput input = getInput(sb); + assert input != null : "@AssumeAssertion(nullness): invariant"; + for (BlockImpl pred : sb.getPredecessors()) { + propagateStoresTo(pred, null, input, FlowRule.EACH_TO_EACH, false); + } + } + break; + } + default: + throw new BugInCF("Unexpected block type: " + b.getType()); + } + } + + @Override + public @Nullable TransferInput getInput(Block b) { + return inputs.get(b); + } + + @Override + public @Nullable S getEntryStore() { + return storeAtEntry; + } + + @Override + protected void initFields(ControlFlowGraph cfg) { + super.initFields(cfg); + outStores.clear(); + exceptionStores.clear(); + // storeAtEntry is null before analysis begin + storeAtEntry = null; + } + + @Override + @RequiresNonNull("cfg") + protected void initInitialInputs() { + worklist.process(cfg); + SpecialBlock regularExitBlock = cfg.getRegularExitBlock(); + SpecialBlock exceptionExitBlock = cfg.getExceptionalExitBlock(); + if (worklist.depthFirstOrder.get(regularExitBlock) == null + && worklist.depthFirstOrder.get(exceptionExitBlock) == null) { + throw new BugInCF( + "regularExitBlock and exceptionExitBlock should never both be null at the same time."); + } + UnderlyingAST underlyingAST = cfg.getUnderlyingAST(); + List returnNodes = cfg.getReturnNodes(); + assert transferFunction != null : "@AssumeAssertion(nullness): invariant"; + S normalInitialStore = transferFunction.initialNormalExitStore(underlyingAST, returnNodes); + S exceptionalInitialStore = transferFunction.initialExceptionalExitStore(underlyingAST); + // If regularExitBlock or exceptionExitBlock is reachable in the control flow graph, then + // initialize it as a start point of the analysis. 
+ if (worklist.depthFirstOrder.get(regularExitBlock) != null) { + worklist.add(regularExitBlock); + inputs.put(regularExitBlock, new TransferInput<>(null, this, normalInitialStore)); + outStores.put(regularExitBlock, normalInitialStore); + } + if (worklist.depthFirstOrder.get(exceptionExitBlock) != null) { + worklist.add(exceptionExitBlock); + inputs.put( + exceptionExitBlock, new TransferInput<>(null, this, exceptionalInitialStore)); + outStores.put(exceptionExitBlock, exceptionalInitialStore); + } + if (worklist.isEmpty()) { + throw new BugInCF("The worklist needs at least one exit block as starting point."); + } + if (inputs.isEmpty() || outStores.isEmpty()) { + throw new BugInCF("At least one input and one output store are required."); + } + } + + @Override + protected void propagateStoresTo( + Block pred, + @Nullable Node node, + TransferInput currentInput, + FlowRule flowRule, + boolean addToWorklistAgain) { + if (flowRule != FlowRule.EACH_TO_EACH) { + throw new BugInCF( + "Backward analysis always propagates EACH to EACH, because there is no control flow."); + } + + addStoreAfter(pred, node, currentInput.getRegularStore(), addToWorklistAgain); + } + + /** + * Add a store after the basic block {@code pred} by merging with the existing stores for that + * location. 
+ * + * @param pred the basic block + * @param node the node of the basic block {@code b} + * @param s the store being added + * @param addBlockToWorklist whether the basic block {@code b} should be added back to {@code + * Worklist} + */ + protected void addStoreAfter(Block pred, @Nullable Node node, S s, boolean addBlockToWorklist) { + // If the block pred is an exception block, decide whether the block of passing node is an + // exceptional successor of the block pred + if (pred instanceof ExceptionBlock + && ((ExceptionBlock) pred).getSuccessor() != null + && node != null) { + @Nullable Block succBlock = ((ExceptionBlock) pred).getSuccessor(); + @Nullable Block block = node.getBlock(); + if (succBlock != null && block != null && succBlock.getId() == block.getId()) { + // If the block of passing node is an exceptional successor of Block pred, propagate + // store to the exceptionStores. Currently it doesn't track the label of an + // exceptional edge from exception block to its exceptional successors in backward + // direction. Instead, all exception stores of exceptional successors of an + // exception block will merge to one exception store at the exception block + ExceptionBlock ebPred = (ExceptionBlock) pred; + S exceptionStore = exceptionStores.get(ebPred); + S newExceptionStore = + (exceptionStore != null) ? exceptionStore.leastUpperBound(s) : s; + if (!newExceptionStore.equals(exceptionStore)) { + exceptionStores.put(ebPred, newExceptionStore); + addBlockToWorklist = true; + } + } + } else { + S predOutStore = getStoreAfter(pred); + S newPredOutStore = (predOutStore != null) ? 
predOutStore.leastUpperBound(s) : s; + if (!newPredOutStore.equals(predOutStore)) { + outStores.put(pred, newPredOutStore); + inputs.put(pred, new TransferInput<>(node, this, newPredOutStore)); + addBlockToWorklist = true; + } + } + if (addBlockToWorklist) { + addToWorklist(pred); + } + } + + /** + * Returns the store corresponding to the location right after the basic block {@code b}. + * + * @param b the given block + * @return the store right after the given block + */ + protected @Nullable S getStoreAfter(Block b) { + return readFromStore(outStores, b); + } + + @Override + public S runAnalysisFor( + Node node, + boolean before, + TransferInput transferInput, + IdentityHashMap nodeValues, + Map, IdentityHashMap>> analysisCaches) { + Block block = node.getBlock(); + assert block != null : "@AssumeAssertion(nullness): invariant"; + Node oldCurrentNode = currentNode; + if (isRunning) { + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + return currentInput.getRegularStore(); + } + isRunning = true; + try { + switch (block.getType()) { + case REGULAR_BLOCK: + { + RegularBlock rBlock = (RegularBlock) block; + // Apply transfer function to contents until we found the node we are + // looking for. + TransferInput store = transferInput; + List nodeList = rBlock.getContents(); + ListIterator reverseIter = nodeList.listIterator(nodeList.size()); + while (reverseIter.hasPrevious()) { + Node n = reverseIter.previous(); + currentNode = n; + if (n == node && !before) { + return store.getRegularStore(); + } + // Copy the store to preserve to change the state in {@link #inputs} + TransferResult transferResult = + callTransferFunction(n, store.copy()); + if (n == node) { + return transferResult.getRegularStore(); + } + store = new TransferInput<>(n, this, transferResult); + } + // This point should never be reached. If the block of 'node' is + // 'block', then 'node' must be part of the contents of 'block'. 
+ throw new BugInCF("This point should never be reached."); + } + case EXCEPTION_BLOCK: + { + ExceptionBlock eb = (ExceptionBlock) block; + if (eb.getNode() != node) { + throw new BugInCF( + "Node should be equal to eb.getNode(). But get: node: " + + node + + "\teb.getNode(): " + + eb.getNode()); + } + if (!before) { + return transferInput.getRegularStore(); + } + currentNode = node; + TransferResult transferResult = + callTransferFunction(node, transferInput); + // Merge transfer result with the exception store of this exceptional block + S exceptionStore = exceptionStores.get(eb); + return exceptionStore == null + ? transferResult.getRegularStore() + : transferResult.getRegularStore().leastUpperBound(exceptionStore); + } + default: + // Only regular blocks and exceptional blocks can hold nodes. + throw new BugInCF("Unexpected block type: " + block.getType()); + } + + } finally { + currentNode = oldCurrentNode; + isRunning = false; + } + } +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardTransferFunction.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardTransferFunction.java new file mode 100644 index 00000000000..118fcc01606 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/BackwardTransferFunction.java @@ -0,0 +1,40 @@ +package org.checkerframework.dataflow.analysis; + +import java.util.List; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.dataflow.cfg.UnderlyingAST; +import org.checkerframework.dataflow.cfg.node.ReturnNode; + +/** + * Interface of a backward transfer function for the abstract interpretation used for the backward + * flow analysis. + * + *

Important: The individual transfer functions ( {@code visit*}) are allowed to use + * (and modify) the stores contained in the argument passed; the ownership is transferred from the + * caller to that function. + * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + */ +public interface BackwardTransferFunction, S extends Store> + extends TransferFunction { + + /** + * Returns the initial store that should be used at the normal exit block. + * + * @param underlyingAST the underlying AST of the given control flow graph + * @param returnNodes the return nodes of the given control flow graph if the underlying AST of + * this graph is a method. Otherwise will be set to {@code null} + * @return the initial store that should be used at the normal exit block + */ + S initialNormalExitStore(UnderlyingAST underlyingAST, @Nullable List returnNodes); + + /** + * Returns the initial store that should be used at the exceptional exit block or given the + * underlying AST of a control flow graph. + * + * @param underlyingAST the underlying AST of the given control flow graph + * @return the initial store that should be used at the exceptional exit block + */ + S initialExceptionalExitStore(UnderlyingAST underlyingAST); +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysis.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysis.java new file mode 100644 index 00000000000..362e3e37e15 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysis.java @@ -0,0 +1,30 @@ +package org.checkerframework.dataflow.analysis; + +import java.util.List; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.dataflow.cfg.node.ReturnNode; +import org.checkerframework.javacutil.Pair; + +/** + * This interface defines a forward analysis, given a control flow graph and a forward transfer + * function. 
+ * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + * @param the forward transfer function type that is used to approximated runtime behavior + */ +public interface ForwardAnalysis< + V extends AbstractValue, + S extends Store, + T extends ForwardTransferFunction> + extends Analysis { + + /** + * Get stores at return statements. These stores are transfer results at return node. Thus for a + * forward analysis, these stores contain the analyzed flow information from entry nodes to + * return nodes. + * + * @return the transfer results for each return node in the CFG + */ + List>> getReturnStatementStores(); +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysisImpl.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysisImpl.java new file mode 100644 index 00000000000..d075f562f68 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardAnalysisImpl.java @@ -0,0 +1,584 @@ +package org.checkerframework.dataflow.analysis; + +import com.sun.source.tree.LambdaExpressionTree; +import com.sun.source.tree.MethodTree; +import com.sun.source.tree.VariableTree; +import java.util.ArrayList; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.lang.model.type.TypeMirror; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; +import org.checkerframework.dataflow.cfg.ControlFlowGraph; +import org.checkerframework.dataflow.cfg.UnderlyingAST; +import org.checkerframework.dataflow.cfg.UnderlyingAST.CFGLambda; +import org.checkerframework.dataflow.cfg.UnderlyingAST.CFGMethod; +import org.checkerframework.dataflow.cfg.UnderlyingAST.Kind; +import org.checkerframework.dataflow.cfg.block.Block; +import org.checkerframework.dataflow.cfg.block.ConditionalBlock; +import 
org.checkerframework.dataflow.cfg.block.ExceptionBlock; +import org.checkerframework.dataflow.cfg.block.RegularBlock; +import org.checkerframework.dataflow.cfg.block.SpecialBlock; +import org.checkerframework.dataflow.cfg.node.LocalVariableNode; +import org.checkerframework.dataflow.cfg.node.Node; +import org.checkerframework.dataflow.cfg.node.ReturnNode; +import org.checkerframework.javacutil.BugInCF; +import org.checkerframework.javacutil.Pair; + +/** + * An implementation of a forward analysis to solve a org.checkerframework.dataflow problem given a + * control flow graph and a forward transfer function. + * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + * @param the transfer function type that is used to approximate runtime behavior + */ +public class ForwardAnalysisImpl< + V extends AbstractValue, + S extends Store, + T extends ForwardTransferFunction> + extends AbstractAnalysis implements ForwardAnalysis { + + /** + * Number of times each block has been analyzed since the last time widening was applied. Null + * if maxCountBeforeWidening is -1, which implies widening isn't used for this analysis. + */ + protected final @Nullable IdentityHashMap blockCount; + + /** + * Number of times a block can be analyzed before widening. -1 implies that widening shouldn't + * be used. + */ + protected final int maxCountBeforeWidening; + + /** Then stores before every basic block (assumed to be 'no information' if not present). */ + protected final IdentityHashMap thenStores; + + /** Else stores before every basic block (assumed to be 'no information' if not present). */ + protected final IdentityHashMap elseStores; + + /** The stores after every return statement. */ + protected final IdentityHashMap> storesAtReturnStatements; + + // `@code`, not `@link`, because dataflow module doesn't depend on framework module. 
+ /** + * Construct an object that can perform a org.checkerframework.dataflow forward analysis over a + * control flow graph. The transfer function is set by the subclass, e.g., {@code + * org.checkerframework.framework.flow.CFAbstractAnalysis}, later. + * + * @param maxCountBeforeWidening number of times a block can be analyzed before widening + */ + public ForwardAnalysisImpl(int maxCountBeforeWidening) { + super(Direction.FORWARD); + this.maxCountBeforeWidening = maxCountBeforeWidening; + this.blockCount = maxCountBeforeWidening == -1 ? null : new IdentityHashMap<>(); + this.thenStores = new IdentityHashMap<>(); + this.elseStores = new IdentityHashMap<>(); + this.storesAtReturnStatements = new IdentityHashMap<>(); + } + + /** + * Construct an object that can perform a org.checkerframework.dataflow forward analysis over a + * control flow graph given a transfer function. + * + * @param transfer the transfer function + */ + public ForwardAnalysisImpl(@Nullable T transfer) { + this(-1); + this.transferFunction = transfer; + } + + @Override + public void performAnalysis(ControlFlowGraph cfg) { + if (isRunning) { + throw new BugInCF( + "ForwardAnalysisImpl::performAnalysis() shouldn't be called when the analysis is running."); + } + isRunning = true; + + try { + init(cfg); + while (!worklist.isEmpty()) { + Block b = worklist.poll(); + performAnalysisBlock(b); + } + } finally { + assert isRunning; + // In case performAnalysisBlock crashed, reset isRunning to false. 
+ isRunning = false; + } + } + + @Override + public void performAnalysisBlock(Block b) { + switch (b.getType()) { + case REGULAR_BLOCK: + { + RegularBlock rb = (RegularBlock) b; + // Apply transfer function to contents + TransferInput inputBefore = getInputBefore(rb); + assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; + currentInput = inputBefore.copy(); + Node lastNode = null; + boolean addToWorklistAgain = false; + for (Node n : rb.getContents()) { + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + TransferResult transferResult = callTransferFunction(n, currentInput); + addToWorklistAgain |= updateNodeValues(n, transferResult); + currentInput = new TransferInput<>(n, this, transferResult); + lastNode = n; + } + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + // Loop will run at least once, making transferResult non-null + // Propagate store to successors + Block succ = rb.getSuccessor(); + assert succ != null + : "@AssumeAssertion(nullness): regular basic block without non-exceptional successor unexpected"; + propagateStoresTo( + succ, lastNode, currentInput, rb.getFlowRule(), addToWorklistAgain); + break; + } + case EXCEPTION_BLOCK: + { + ExceptionBlock eb = (ExceptionBlock) b; + // Apply transfer function to content + TransferInput inputBefore = getInputBefore(eb); + assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; + currentInput = inputBefore.copy(); + Node node = eb.getNode(); + TransferResult transferResult = callTransferFunction(node, currentInput); + boolean addToWorklistAgain = updateNodeValues(node, transferResult); + // Propagate store to successor + Block succ = eb.getSuccessor(); + if (succ != null) { + currentInput = new TransferInput<>(node, this, transferResult); + // TODO: Variable wasn't used. 
+ // Store.FlowRule storeFlow = eb.getFlowRule(); + propagateStoresTo( + succ, node, currentInput, eb.getFlowRule(), addToWorklistAgain); + } + // Propagate store to exceptional successors + for (Map.Entry> e : + eb.getExceptionalSuccessors().entrySet()) { + TypeMirror cause = e.getKey(); + S exceptionalStore = transferResult.getExceptionalStore(cause); + if (exceptionalStore != null) { + for (Block exceptionSucc : e.getValue()) { + addStoreBefore( + exceptionSucc, + node, + exceptionalStore, + Store.Kind.BOTH, + addToWorklistAgain); + } + } else { + for (Block exceptionSucc : e.getValue()) { + addStoreBefore( + exceptionSucc, + node, + inputBefore.copy().getRegularStore(), + Store.Kind.BOTH, + addToWorklistAgain); + } + } + } + break; + } + case CONDITIONAL_BLOCK: + { + ConditionalBlock cb = (ConditionalBlock) b; + // Get store before + TransferInput inputBefore = getInputBefore(cb); + assert inputBefore != null : "@AssumeAssertion(nullness): invariant"; + TransferInput input = inputBefore.copy(); + // Propagate store to successor + Block thenSucc = cb.getThenSuccessor(); + Block elseSucc = cb.getElseSuccessor(); + propagateStoresTo(thenSucc, null, input, cb.getThenFlowRule(), false); + propagateStoresTo(elseSucc, null, input, cb.getElseFlowRule(), false); + break; + } + case SPECIAL_BLOCK: + { + // Special basic blocks are empty and cannot throw exceptions, + // thus there is no need to perform any analysis. 
+ SpecialBlock sb = (SpecialBlock) b; + Block succ = sb.getSuccessor(); + if (succ != null) { + TransferInput input = getInputBefore(b); + assert input != null : "@AssumeAssertion(nullness): invariant"; + propagateStoresTo(succ, null, input, sb.getFlowRule(), false); + } + break; + } + default: + throw new BugInCF("Unexpected block type: " + b.getType()); + } + } + + @Override + public @Nullable TransferInput getInput(Block b) { + return getInputBefore(b); + } + + @Override + @SuppressWarnings("contracts.precondition.override.invalid") // implementation field + @RequiresNonNull("cfg") + public List>> getReturnStatementStores() { + List>> result = new ArrayList<>(); + for (ReturnNode returnNode : cfg.getReturnNodes()) { + TransferResult store = storesAtReturnStatements.get(returnNode); + result.add(Pair.of(returnNode, store)); + } + return result; + } + + @Override + public S runAnalysisFor( + Node node, + boolean before, + TransferInput transferInput, + IdentityHashMap nodeValues, + Map, IdentityHashMap>> analysisCaches) { + Block block = node.getBlock(); + assert block != null : "@AssumeAssertion(nullness): invariant"; + Node oldCurrentNode = currentNode; + + // Prepare cache + IdentityHashMap> cache; + if (analysisCaches != null) { + cache = analysisCaches.get(transferInput); + if (cache == null) { + cache = new IdentityHashMap<>(); + analysisCaches.put(transferInput, cache); + } + } else { + cache = null; + } + + if (isRunning) { + assert currentInput != null : "@AssumeAssertion(nullness): invariant"; + return currentInput.getRegularStore(); + } + setNodeValues(nodeValues); + isRunning = true; + try { + switch (block.getType()) { + case REGULAR_BLOCK: + { + RegularBlock rb = (RegularBlock) block; + // Apply transfer function to contents until we found the node we are + // looking for. 
+ TransferInput store = transferInput; + TransferResult transferResult; + for (Node n : rb.getContents()) { + currentNode = n; + if (n == node && before) { + return store.getRegularStore(); + } + if (cache != null && cache.containsKey(n)) { + transferResult = cache.get(n); + } else { + // Copy the store to preserve to change the state in the cache + transferResult = callTransferFunction(n, store.copy()); + if (cache != null) { + cache.put(n, transferResult); + } + } + if (n == node) { + return transferResult.getRegularStore(); + } + store = new TransferInput<>(n, this, transferResult); + } + // This point should never be reached. If the block of 'node' is + // 'block', then 'node' must be part of the contents of 'block'. + throw new BugInCF("This point should never be reached."); + } + case EXCEPTION_BLOCK: + { + ExceptionBlock eb = (ExceptionBlock) block; + // Apply the transfer function to content + if (eb.getNode() != node) { + throw new BugInCF( + "Node should be equal to eb.getNode(). But get: node: " + + node + + "\teb.getNode(): " + + eb.getNode()); + } + if (before) { + return transferInput.getRegularStore(); + } + currentNode = node; + TransferResult transferResult = + callTransferFunction(node, transferInput); + return transferResult.getRegularStore(); + } + default: + // Only regular blocks and exceptional blocks can hold nodes. 
+ throw new BugInCF("Unexpected block type: " + block.getType()); + } + } finally { + currentNode = oldCurrentNode; + isRunning = false; + } + } + + @Override + protected void initFields(ControlFlowGraph cfg) { + thenStores.clear(); + elseStores.clear(); + if (blockCount != null) { + blockCount.clear(); + } + storesAtReturnStatements.clear(); + super.initFields(cfg); + } + + @Override + @RequiresNonNull("cfg") + protected void initInitialInputs() { + worklist.process(cfg); + Block entry = cfg.getEntryBlock(); + worklist.add(entry); + List parameters = null; + UnderlyingAST underlyingAST = cfg.getUnderlyingAST(); + if (underlyingAST.getKind() == Kind.METHOD) { + MethodTree tree = ((CFGMethod) underlyingAST).getMethod(); + parameters = new ArrayList<>(); + for (VariableTree p : tree.getParameters()) { + LocalVariableNode var = new LocalVariableNode(p); + parameters.add(var); + // TODO: document that LocalVariableNode has no block that it belongs to + } + } else if (underlyingAST.getKind() == Kind.LAMBDA) { + LambdaExpressionTree lambda = ((CFGLambda) underlyingAST).getLambdaTree(); + parameters = new ArrayList<>(); + for (VariableTree p : lambda.getParameters()) { + LocalVariableNode var = new LocalVariableNode(p); + parameters.add(var); + // TODO: document that LocalVariableNode has no block that it belongs to + } + } + assert transferFunction != null : "@AssumeAssertion(nullness): invariant"; + S initialStore = transferFunction.initialStore(underlyingAST, parameters); + thenStores.put(entry, initialStore); + elseStores.put(entry, initialStore); + inputs.put(entry, new TransferInput<>(null, this, initialStore)); + } + + @Override + protected TransferResult callTransferFunction(Node node, TransferInput input) { + TransferResult transferResult = super.callTransferFunction(node, input); + + if (node instanceof ReturnNode) { + // Save a copy of the store to later check if some property holds at a given return + // statement + storesAtReturnStatements.put((ReturnNode) 
node, transferResult); + } + return transferResult; + } + + @Override + protected void propagateStoresTo( + Block succ, + @Nullable Node node, + TransferInput currentInput, + Store.FlowRule flowRule, + boolean addToWorklistAgain) { + switch (flowRule) { + case EACH_TO_EACH: + if (currentInput.containsTwoStores()) { + addStoreBefore( + succ, + node, + currentInput.getThenStore(), + Store.Kind.THEN, + addToWorklistAgain); + addStoreBefore( + succ, + node, + currentInput.getElseStore(), + Store.Kind.ELSE, + addToWorklistAgain); + } else { + addStoreBefore( + succ, + node, + currentInput.getRegularStore(), + Store.Kind.BOTH, + addToWorklistAgain); + } + break; + case THEN_TO_BOTH: + addStoreBefore( + succ, + node, + currentInput.getThenStore(), + Store.Kind.BOTH, + addToWorklistAgain); + break; + case ELSE_TO_BOTH: + addStoreBefore( + succ, + node, + currentInput.getElseStore(), + Store.Kind.BOTH, + addToWorklistAgain); + break; + case THEN_TO_THEN: + addStoreBefore( + succ, + node, + currentInput.getThenStore(), + Store.Kind.THEN, + addToWorklistAgain); + break; + case ELSE_TO_ELSE: + addStoreBefore( + succ, + node, + currentInput.getElseStore(), + Store.Kind.ELSE, + addToWorklistAgain); + break; + } + } + + /** + * Add a store before the basic block {@code b} by merging with the existing stores for that + * location. 
+ * + * @param b a basic block + * @param node the node of the basic block {@code b} + * @param s the store being added + * @param kind the kind of store {@code s} + * @param addBlockToWorklist whether the basic block {@code b} should be added back to {@code + * Worklist} + */ + protected void addStoreBefore( + Block b, @Nullable Node node, S s, Store.Kind kind, boolean addBlockToWorklist) { + S thenStore = getStoreBefore(b, Store.Kind.THEN); + S elseStore = getStoreBefore(b, Store.Kind.ELSE); + boolean shouldWiden = false; + if (blockCount != null) { + Integer count = blockCount.get(b); + if (count == null) { + count = 0; + } + shouldWiden = count >= maxCountBeforeWidening; + if (shouldWiden) { + blockCount.put(b, 0); + } else { + blockCount.put(b, count + 1); + } + } + switch (kind) { + case THEN: + { + // Update the then store + S newThenStore = mergeStores(s, thenStore, shouldWiden); + if (!newThenStore.equals(thenStore)) { + thenStores.put(b, newThenStore); + if (elseStore != null) { + inputs.put(b, new TransferInput<>(node, this, newThenStore, elseStore)); + addBlockToWorklist = true; + } + } + break; + } + case ELSE: + { + // Update the else store + S newElseStore = mergeStores(s, elseStore, shouldWiden); + if (!newElseStore.equals(elseStore)) { + elseStores.put(b, newElseStore); + if (thenStore != null) { + inputs.put(b, new TransferInput<>(node, this, thenStore, newElseStore)); + addBlockToWorklist = true; + } + } + break; + } + case BOTH: + if (thenStore == elseStore) { + // Currently there is only one regular store + S newStore = mergeStores(s, thenStore, shouldWiden); + if (!newStore.equals(thenStore)) { + thenStores.put(b, newStore); + elseStores.put(b, newStore); + inputs.put(b, new TransferInput<>(node, this, newStore)); + addBlockToWorklist = true; + } + } else { + boolean storeChanged = false; + S newThenStore = mergeStores(s, thenStore, shouldWiden); + if (!newThenStore.equals(thenStore)) { + thenStores.put(b, newThenStore); + storeChanged = true; 
+ } + S newElseStore = mergeStores(s, elseStore, shouldWiden); + if (!newElseStore.equals(elseStore)) { + elseStores.put(b, newElseStore); + storeChanged = true; + } + if (storeChanged) { + inputs.put(b, new TransferInput<>(node, this, newThenStore, newElseStore)); + addBlockToWorklist = true; + } + } + } + if (addBlockToWorklist) { + addToWorklist(b); + } + } + + /** + * Merge two stores, possibly widening the result. + * + * @param newStore the new Store + * @param previousStore the previous Store + * @param shouldWiden should widen or not + * @return the merged Store + */ + private S mergeStores(S newStore, @Nullable S previousStore, boolean shouldWiden) { + if (previousStore == null) { + return newStore; + } else if (shouldWiden) { + return newStore.widenedUpperBound(previousStore); + } else { + return newStore.leastUpperBound(previousStore); + } + } + + /** + * Return the store corresponding to the location right before the basic block {@code b}. + * + * @param b a block + * @param kind the kind of store which will be returned + * @return the store corresponding to the location right before the basic block {@code b} + */ + protected @Nullable S getStoreBefore(Block b, Store.Kind kind) { + switch (kind) { + case THEN: + return readFromStore(thenStores, b); + case ELSE: + return readFromStore(elseStores, b); + default: + throw new BugInCF("Unexpected Store Kind: " + kind); + } + } + + /** + * Returns the transfer input corresponding to the location right before the basic block {@code + * b}. 
+ * + * @param b a block + * @return the transfer input corresponding to the location right before the basic block {@code + * b} + */ + protected @Nullable TransferInput getInputBefore(Block b) { + return inputs.get(b); + } +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardTransferFunction.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardTransferFunction.java new file mode 100644 index 00000000000..da48b0b2242 --- /dev/null +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/ForwardTransferFunction.java @@ -0,0 +1,31 @@ +package org.checkerframework.dataflow.analysis; + +import java.util.List; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.checkerframework.dataflow.cfg.UnderlyingAST; +import org.checkerframework.dataflow.cfg.node.LocalVariableNode; + +/** + * Interface of a forward transfer function for the abstract interpretation used for the forward + * flow analysis. + * + *

Important: The individual transfer functions ( {@code visit*}) are allowed to use + * (and modify) the stores contained in the argument passed; the ownership is transferred from the + * caller to that function. + * + * @param the abstract value type to be tracked by the analysis + * @param the store type used in the analysis + */ +public interface ForwardTransferFunction, S extends Store> + extends TransferFunction { + + /** + * Returns the initial store to be used by the org.checkerframework.dataflow analysis. {@code + * parameters} is non-null if the underlying AST is a method. + * + * @param underlyingAST an abstract syntax tree + * @param parameters a list of local variable nodes + * @return the initial store + */ + S initialStore(UnderlyingAST underlyingAST, @Nullable List parameters); +} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/RegularTransferResult.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/RegularTransferResult.java index 7260c79a52c..d15ed4bbe4a 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/RegularTransferResult.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/RegularTransferResult.java @@ -86,7 +86,7 @@ public RegularTransferResult( * *

Aliasing: {@code resultStore} and any store in {@code exceptionalStores} are not * allowed to be used anywhere outside of this class (including use through aliases). Complete - * control over the objects is transfered to this class. + * control over the objects is transferred to this class. * * @param value the abstract value produced by the transfer function * @param resultStore {@link #store} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferFunction.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferFunction.java index 25a3e0844d0..2634333a45f 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferFunction.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferFunction.java @@ -1,9 +1,5 @@ package org.checkerframework.dataflow.analysis; -import java.util.List; -import org.checkerframework.checker.nullness.qual.Nullable; -import org.checkerframework.dataflow.cfg.UnderlyingAST; -import org.checkerframework.dataflow.cfg.node.LocalVariableNode; import org.checkerframework.dataflow.cfg.node.Node; import org.checkerframework.dataflow.cfg.node.NodeVisitor; @@ -13,29 +9,24 @@ *

A transfer function consists of the following components: * *

    - *
  • A method {@code initialStore} that determines which initial store should be used in the + *
  • Initial store method(s) that determine which initial store should be used in the * org.checkerframework.dataflow analysis. *
  • A function for every {@link Node} type that determines the behavior of the * org.checkerframework.dataflow analysis in that case. This method takes a {@link Node} and * an incoming store, and produces a {@link RegularTransferResult}. *
* - *

Important: The individual transfer functions ({@code visit*}) are allowed to use (and - * modify) the stores contained in the argument passed; the ownership is transfered from the caller - * to that function. + *

Note: Initial store method(s) are different between forward and backward transfer + * functions. Thus, this interface does not define any initial store method(s). {@link + * ForwardTransferFunction} and {@link BackwardTransferFunction} declare their own initial store + * method(s). + * + *

Important: The individual transfer functions ( {@code visit*}) are allowed to use + * (and modify) the stores contained in the argument passed; the ownership is transferred from the + * caller to that function. * * @param type of the abstract value that is tracked * @param the store type used in the analysis */ public interface TransferFunction, S extends Store> - extends NodeVisitor, TransferInput> { - - /** - * Returns the initial store to be used by the org.checkerframework.dataflow analysis. {@code - * parameters} is only set if the underlying AST is a method. - * - * @return the initial store to be used by the org.checkerframework.dataflow analysis. {@code - * parameters} is only set if the underlying AST is a method. - */ - S initialStore(UnderlyingAST underlyingAST, @Nullable List parameters); -} + extends NodeVisitor, TransferInput> {} diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferInput.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferInput.java index 2fe60fad23e..6682873967d 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferInput.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferInput.java @@ -6,8 +6,8 @@ /** * {@code TransferInput} is used as the input type of the individual transfer functions of a {@link - * TransferFunction}. It also contains a reference to the node for which the transfer function will - * be applied. + * ForwardTransferFunction} or a {@link BackwardTransferFunction}. It also contains a reference to + * the node for which the transfer function will be applied. * *

A {@code TransferInput} contains one or two stores. If two stores are present, one belongs to * 'then', and the other to 'else'. diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferResult.java b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferResult.java index 548ac27a435..ba88e4aac22 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferResult.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/analysis/TransferResult.java @@ -97,6 +97,7 @@ public void setResultValue(V resultValue) { * Returns the store that flows along the outgoing exceptional edge labeled with {@code * exception} (or {@code null} if no special handling is required for exceptional edges). * + * @param exception an exception type * @return the store that flows along the outgoing exceptional edge labeled with {@code * exception} (or {@code null} if no special handling is required for exceptional edges) */ diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/AbstractCFGVisualizer.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/AbstractCFGVisualizer.java index ffd31540f40..06e8ada7328 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/AbstractCFGVisualizer.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/AbstractCFGVisualizer.java @@ -15,6 +15,7 @@ import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.dataflow.analysis.AbstractValue; import org.checkerframework.dataflow.analysis.Analysis; +import org.checkerframework.dataflow.analysis.Analysis.Direction; import org.checkerframework.dataflow.analysis.Store; import org.checkerframework.dataflow.analysis.TransferFunction; import org.checkerframework.dataflow.analysis.TransferInput; @@ -25,6 +26,7 @@ import org.checkerframework.dataflow.cfg.block.SingleSuccessorBlock; import org.checkerframework.dataflow.cfg.block.SpecialBlock; import 
org.checkerframework.dataflow.cfg.node.Node; +import org.checkerframework.javacutil.BugInCF; /** * This abstract class makes implementing a {@link CFGVisualizer} easier. Some of the methods in @@ -196,26 +198,15 @@ protected String visualizeBlockHelper( // Visualize transfer input if necessary. if (analysis != null) { - // The transfer input before this block is added before the block content. - sbBlock.insert(0, visualizeBlockTransferInput(bb, analysis)); + sbBlock.insert(0, visualizeBlockTransferInputBefore(bb, analysis)); if (verbose) { Node lastNode = getLastNode(bb); if (lastNode != null) { - @SuppressWarnings("nullness:contracts.precondition.not.satisfied") - S store = analysis.getResult().getStoreAfter(lastNode); - StringBuilder sbStore = new StringBuilder(); - sbStore.append(escapeString).append("~~~~~~~~~").append(escapeString); - sbStore.append("After: "); - if (store != null) { - sbStore.append(visualizeStore(store)); - } else { - sbStore.append("null store"); - } - sbBlock.append(sbStore); + sbBlock.append(visualizeBlockTransferInputAfter(bb, analysis)); } } } - if (!centered) { + if (!centered || verbose) { sbBlock.append(escapeString); } return sbBlock.toString(); @@ -253,44 +244,58 @@ protected List addBlockContent(Block bb) { case EXCEPTION_BLOCK: return Collections.singletonList(((ExceptionBlock) bb).getNode()); case CONDITIONAL_BLOCK: - return Collections.emptyList(); case SPECIAL_BLOCK: return Collections.emptyList(); default: - throw new Error("Unrecognized basic block type: " + bb.getType()); + throw new BugInCF("Unrecognized basic block type: " + bb.getType()); } } /** - * Visualize the transfer input of a block. + * Visualize the transfer input before the given block. 
* * @param bb the block * @param analysis the current analysis * @param escapeString the escape String for the special need of visualization, e.g., "\\l" for * {@link DOTCFGVisualizer} to keep line left-justification, "\n" for {@link * StringCFGVisualizer} to simply add a new line - * @return the String representation of the transfer input of the block + * @return the visualization of the transfer input before the given block */ - protected String visualizeBlockTransferInputHelper( + protected String visualizeBlockTransferInputBeforeHelper( Block bb, Analysis analysis, String escapeString) { - assert analysis != null - : "analysis should be non-null when visualizing the transfer input of a block."; + if (analysis == null) { + throw new BugInCF( + "analysis must be non-null when visualizing the transfer input of a block."); + } - TransferInput input = analysis.getInput(bb); - assert input != null : "@AssumeAssertion(nullness): well-behaved analysis"; + S regularStore; + S thenStore = null; + S elseStore = null; + boolean isTwoStores = false; StringBuilder sbStore = new StringBuilder(); - - // Split input representation to two lines. 
sbStore.append("Before: "); - if (!input.containsTwoStores()) { - S regularStore = input.getRegularStore(); + + Direction analysisDirection = analysis.getDirection(); + + if (analysisDirection == Direction.FORWARD) { + TransferInput input = analysis.getInput(bb); + assert input != null : "@AssumeAssertion(nullness): invariant"; + isTwoStores = input.containsTwoStores(); + regularStore = input.getRegularStore(); + thenStore = input.getThenStore(); + elseStore = input.getElseStore(); + } else { + regularStore = analysis.getResult().getStoreBefore(bb); + } + + if (!isTwoStores) { sbStore.append(visualizeStore(regularStore)); } else { - S thenStore = input.getThenStore(); + assert thenStore != null : "@AssumeAssertion(nullness): invariant"; + assert elseStore != null : "@AssumeAssertion(nullness): invariant"; sbStore.append("then="); sbStore.append(visualizeStore(thenStore)); - S elseStore = input.getElseStore(); sbStore.append(", else="); sbStore.append(visualizeStore(elseStore)); } @@ -298,6 +303,58 @@ protected String visualizeBlockTransferInputHelper( return sbStore.toString(); } + /** + * Visualize the transfer input after the given block. 
+ * + * @param bb the given block + * @param analysis the current analysis + * @param escapeString the escape String for the special need of visualization, e.g., "\\l" for + * {@link DOTCFGVisualizer} to keep line left-justification, "\n" for {@link + * StringCFGVisualizer} to simply add a new line + * @return the visualization of the transfer input after the given block + */ + protected String visualizeBlockTransferInputAfterHelper( + Block bb, Analysis analysis, String escapeString) { + if (analysis == null) { + throw new BugInCF( + "analysis must be non-null when visualizing the transfer input of a block."); + } + + S regularStore; + S thenStore = null; + S elseStore = null; + boolean isTwoStores = false; + + StringBuilder sbStore = new StringBuilder(); + sbStore.append("After: "); + + Direction analysisDirection = analysis.getDirection(); + + if (analysisDirection == Direction.FORWARD) { + regularStore = analysis.getResult().getStoreAfter(bb); + } else { + TransferInput input = analysis.getInput(bb); + assert input != null : "@AssumeAssertion(nullness): invariant"; + isTwoStores = input.containsTwoStores(); + regularStore = input.getRegularStore(); + thenStore = input.getThenStore(); + elseStore = input.getElseStore(); + } + + if (!isTwoStores) { + sbStore.append(visualizeStore(regularStore)); + } else { + assert thenStore != null : "@AssumeAssertion(nullness): invariant"; + assert elseStore != null : "@AssumeAssertion(nullness): invariant"; + sbStore.append("then="); + sbStore.append(visualizeStore(thenStore)); + sbStore.append(", else="); + sbStore.append(visualizeStore(elseStore)); + } + sbStore.insert(0, escapeString + "~~~~~~~~~" + escapeString); + return sbStore.toString(); + } + + /** + * Visualize a special block.
* @@ -314,7 +371,7 @@ protected String visualizeSpecialBlockHelper(SpecialBlock sbb, String separator) case EXCEPTIONAL_EXIT: return "" + separator; default: - throw new Error("Unrecognized special block type: " + sbb.getType()); + throw new BugInCF("Unrecognized special block type: " + sbb.getType()); } } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/CFGVisualizer.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/CFGVisualizer.java index 96f8297ac99..370254d1215 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/CFGVisualizer.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/CFGVisualizer.java @@ -172,13 +172,22 @@ public interface CFGVisualizer< String visualizeConditionalBlock(ConditionalBlock cbb); /** - * Visualize the transferInput of a Block based on the analysis. + * Visualize the transferInput before a Block based on the analysis. * * @param bb the block * @param analysis the current analysis - * @return the String representation of the transferInput of the given block + * @return the String representation of the transferInput before the given block */ - String visualizeBlockTransferInput(Block bb, Analysis analysis); + String visualizeBlockTransferInputBefore(Block bb, Analysis analysis); + + /** + * Visualize the transferInput after a Block based on the analysis. + * + * @param bb the block + * @param analysis the current analysis + * @return the String representation of the transferInput after the given block + */ + String visualizeBlockTransferInputAfter(Block bb, Analysis analysis); /** * Visualize a Node based on the analysis. 
diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/DOTCFGVisualizer.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/DOTCFGVisualizer.java index 40a69f8b1f1..6b843f57f32 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/DOTCFGVisualizer.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/DOTCFGVisualizer.java @@ -144,8 +144,13 @@ public String visualizeConditionalBlock(ConditionalBlock cbb) { } @Override - public String visualizeBlockTransferInput(Block bb, Analysis analysis) { - return super.visualizeBlockTransferInputHelper(bb, analysis, leftJustifiedTerminator); + public String visualizeBlockTransferInputBefore(Block bb, Analysis analysis) { + return super.visualizeBlockTransferInputBeforeHelper(bb, analysis, leftJustifiedTerminator); + } + + @Override + public String visualizeBlockTransferInputAfter(Block bb, Analysis analysis) { + return super.visualizeBlockTransferInputAfterHelper(bb, analysis, leftJustifiedTerminator); } /** diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/StringCFGVisualizer.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/StringCFGVisualizer.java index b9eacec46d8..c72663acf60 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/StringCFGVisualizer.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/StringCFGVisualizer.java @@ -85,8 +85,13 @@ public String visualizeConditionalBlock(ConditionalBlock cbb) { } @Override - public String visualizeBlockTransferInput(Block bb, Analysis analysis) { - return super.visualizeBlockTransferInputHelper(bb, analysis, lineSeparator); + public String visualizeBlockTransferInputBefore(Block bb, Analysis analysis) { + return super.visualizeBlockTransferInputBeforeHelper(bb, analysis, lineSeparator); + } + + @Override + public String visualizeBlockTransferInputAfter(Block bb, Analysis analysis) { + return super.visualizeBlockTransferInputAfterHelper(bb, analysis, 
lineSeparator); } @Override diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/Block.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/Block.java index 46ab60bcf1b..2db3de2894c 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/Block.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/Block.java @@ -1,5 +1,7 @@ package org.checkerframework.dataflow.cfg.block; +import java.util.Set; + /** Represents a basic block in a control flow graph. */ public interface Block { @@ -32,4 +34,11 @@ public static enum BlockType { * @return the unique identifier of this block */ long getId(); + + /** + * Returns the predecessors of this basic block. + * + * @return the predecessors of this basic block + */ + Set getPredecessors(); } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/BlockImpl.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/BlockImpl.java index 86a6dbe7ad2..1d1961f8487 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/BlockImpl.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/block/BlockImpl.java @@ -43,11 +43,7 @@ public BlockType getType() { return type; } - /** - * Returns the list of predecessors of this basic block. 
- * - * @return the list of predecessors of this basic block - */ + @Override public Set getPredecessors() { return Collections.unmodifiableSet(predecessors); } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/playground/ConstantPropagationPlayground.java b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/playground/ConstantPropagationPlayground.java index 5761d18320e..308f6acf819 100644 --- a/dataflow/src/main/java/org/checkerframework/dataflow/cfg/playground/ConstantPropagationPlayground.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/cfg/playground/ConstantPropagationPlayground.java @@ -1,6 +1,7 @@ package org.checkerframework.dataflow.cfg.playground; -import org.checkerframework.dataflow.analysis.Analysis; +import org.checkerframework.dataflow.analysis.ForwardAnalysis; +import org.checkerframework.dataflow.analysis.ForwardAnalysisImpl; import org.checkerframework.dataflow.cfg.CFGVisualizeLauncher; import org.checkerframework.dataflow.constantpropagation.Constant; import org.checkerframework.dataflow.constantpropagation.ConstantPropagationStore; @@ -19,10 +20,10 @@ public static void main(String[] args) { // run the analysis and create a PDF file ConstantPropagationTransfer transfer = new ConstantPropagationTransfer(); - Analysis analysis = - new Analysis<>(transfer); + ForwardAnalysis + forwardAnalysis = new ForwardAnalysisImpl<>(transfer); CFGVisualizeLauncher cfgVisualizeLauncher = new CFGVisualizeLauncher(); cfgVisualizeLauncher.generateDOTofCFG( - inputFile, outputDir, method, clazz, true, false, analysis); + inputFile, outputDir, method, clazz, true, false, forwardAnalysis); } } diff --git a/dataflow/src/main/java/org/checkerframework/dataflow/constantpropagation/ConstantPropagationTransfer.java b/dataflow/src/main/java/org/checkerframework/dataflow/constantpropagation/ConstantPropagationTransfer.java index 3889151346c..5dc1ad85b90 100644 --- 
a/dataflow/src/main/java/org/checkerframework/dataflow/constantpropagation/ConstantPropagationTransfer.java +++ b/dataflow/src/main/java/org/checkerframework/dataflow/constantpropagation/ConstantPropagationTransfer.java @@ -3,8 +3,8 @@ import java.util.List; import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.dataflow.analysis.ConditionalTransferResult; +import org.checkerframework.dataflow.analysis.ForwardTransferFunction; import org.checkerframework.dataflow.analysis.RegularTransferResult; -import org.checkerframework.dataflow.analysis.TransferFunction; import org.checkerframework.dataflow.analysis.TransferInput; import org.checkerframework.dataflow.analysis.TransferResult; import org.checkerframework.dataflow.cfg.UnderlyingAST; @@ -19,7 +19,7 @@ public class ConstantPropagationTransfer extends AbstractNodeVisitor< TransferResult, TransferInput> - implements TransferFunction { + implements ForwardTransferFunction { @Override public ConstantPropagationStore initialStore( diff --git a/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractAnalysis.java b/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractAnalysis.java index 1e86f27bf28..999545467a1 100644 --- a/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractAnalysis.java +++ b/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractAnalysis.java @@ -10,7 +10,7 @@ import javax.lang.model.util.Types; import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.common.basetype.BaseTypeChecker; -import org.checkerframework.dataflow.analysis.Analysis; +import org.checkerframework.dataflow.analysis.ForwardAnalysisImpl; import org.checkerframework.dataflow.cfg.ControlFlowGraph; import org.checkerframework.framework.source.SourceChecker; import org.checkerframework.framework.type.AnnotatedTypeFactory; @@ -38,7 +38,7 @@ public abstract class CFAbstractAnalysis< V extends CFAbstractValue, S 
extends CFAbstractStore, T extends CFAbstractTransfer> - extends Analysis { + extends ForwardAnalysisImpl { /** The qualifier hierarchy for which to track annotations. */ protected final QualifierHierarchy qualifierHierarchy; diff --git a/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractTransfer.java b/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractTransfer.java index 9d147ada9ad..5f68c62d5de 100644 --- a/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractTransfer.java +++ b/framework/src/main/java/org/checkerframework/framework/flow/CFAbstractTransfer.java @@ -28,8 +28,8 @@ import org.checkerframework.dataflow.analysis.FlowExpressions.LocalVariable; import org.checkerframework.dataflow.analysis.FlowExpressions.Receiver; import org.checkerframework.dataflow.analysis.FlowExpressions.ThisReference; +import org.checkerframework.dataflow.analysis.ForwardTransferFunction; import org.checkerframework.dataflow.analysis.RegularTransferResult; -import org.checkerframework.dataflow.analysis.TransferFunction; import org.checkerframework.dataflow.analysis.TransferInput; import org.checkerframework.dataflow.analysis.TransferResult; import org.checkerframework.dataflow.cfg.UnderlyingAST; @@ -93,7 +93,7 @@ public abstract class CFAbstractTransfer< S extends CFAbstractStore, T extends CFAbstractTransfer> extends AbstractNodeVisitor, TransferInput> - implements TransferFunction { + implements ForwardTransferFunction { /** The analysis class this store belongs to. */ protected final CFAbstractAnalysis analysis;