/**
 * Builds the virtual-assignment ("match") edges: for every field load whose
 * base may alias the base of some store on the same field, connect the stored
 * value to each load target, recording the reverse edge as well.
 */
public ValidMatches(PAG pag, FieldToEdgesMap fieldToStores) {
  for (Iterator iter = pag.loadSources().iterator(); iter.hasNext();) {
    FieldRefNode load = (FieldRefNode) iter.next();
    VarNode loadBase = load.getBase();
    ArraySet<Pair<VarNode, VarNode>> storesOnField = fieldToStores.get(load.getField());
    for (Pair<VarNode, VarNode> store : storesOnField) {
      // the store can only flow to this load if the bases may alias
      VarNode storeBase = store.getO2();
      if (!loadBase.getP2Set().hasNonEmptyIntersection(storeBase.getP2Set())) {
        continue;
      }
      VarNode matchSrc = store.getO1();
      for (Node target : pag.loadLookup(load)) {
        VarNode matchTgt = (VarNode) target;
        vMatchEdges.put(matchSrc, matchTgt);
        vMatchBarEdges.put(matchTgt, matchSrc);
      }
    }
  }
}
/**
 * Constructs a map from each field to the (field-ref, local target) pairs of
 * all stores on that field. Stores whose target is a global variable are
 * skipped.
 */
public static FieldAccessMap buildStoreMap(PAG pag) {
  FieldAccessMap ret = new FieldAccessMap();
  for (Iterator frNodeIter = pag.storeInvSourcesIterator(); frNodeIter.hasNext();) {
    FieldRefNode frNode = (FieldRefNode) frNodeIter.next();
    SparkField field = frNode.getField();
    for (Node target : pag.storeInvLookup(frNode)) {
      // only local targets are recorded; globals are handled elsewhere
      if (target instanceof GlobalVarNode) {
        continue;
      }
      ret.put(field, new Pair<FieldRefNode, LocalVarNode>(frNode, (LocalVarNode) target));
    }
  }
  return ret;
}
/**
 * Consults the contexts-for-allocs cache for varAndContext. Any cached
 * alloc-and-context answers that occur in locs are copied into ret. Returns
 * the subset of locs that still needs to be queried (all of locs on a cache
 * miss) and updates the cache's record of already-queried locations.
 *
 * Fix: the original performed three hash lookups (containsKey plus two
 * get calls) on the same key; this version does a single get.
 */
protected PointsToSetInternal checkContextsForAllocsCache(
    VarAndContext varAndContext, AllocAndContextSet ret,
    PointsToSetInternal locs) {
  Pair<PointsToSetInternal, AllocAndContextSet> cached =
      contextsForAllocsCache.get(varAndContext);
  if (cached == null) {
    // first query for this var/context: remember locs, query everything
    PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
    storedSet.addAll(locs, null);
    contextsForAllocsCache.put(varAndContext,
        new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet,
            new AllocAndContextSet()));
    return locs;
  }
  // copy over the cached answers that are relevant to locs
  for (AllocAndContext allocAndContext : cached.getO2()) {
    if (locs.contains(allocAndContext.alloc)) {
      ret.add(allocAndContext);
    }
  }
  // compute the locations in locs that were never queried before
  final PointsToSetInternal oldLocs = cached.getO1();
  final PointsToSetInternal tmpSet = new HybridPointsToSet(locs.getType(), pag);
  locs.forall(new P2SetVisitor() {
    @Override
    public void visit(Node n) {
      if (!oldLocs.contains(n)) {
        tmpSet.add(n);
      }
    }
  });
  // record them as queried for next time
  oldLocs.addAll(tmpSet, null);
  return tmpSet;
}
/**
 * Computes the set of variable nodes reachable backwards from source along
 * simple-assignment edges and load/store match edges, restricted to
 * predecessors whose points-to set intersects allocs.
 */
@SuppressWarnings("unused")
protected Set<VarNode> nodesPropagatedThrough(final VarNode source,
    final PointsToSetInternal allocs) {
  final Set<VarNode> marked = new HashSet<VarNode>();
  final Stack<VarNode> worklist = new Stack<VarNode>();
  Propagator<VarNode> p = new Propagator<VarNode>(marked, worklist);
  p.prop(source);
  while (!worklist.isEmpty()) {
    VarNode cur = worklist.pop();
    // predecessors via direct assignments
    for (Node pred : pag.simpleInvLookup(cur)) {
      VarNode assignSrc = (VarNode) pred;
      if (assignSrc.getP2Set().hasNonEmptyIntersection(allocs)) {
        p.prop(assignSrc);
      }
    }
    // predecessors via field load/store matches
    for (VarNode matchSrc : vMatches.vMatchInvLookup(cur)) {
      if (matchSrc.getP2Set().hasNonEmptyIntersection(allocs)) {
        p.prop(matchSrc);
      }
    }
  }
  return marked;
}
/**
 * Returns true iff this set and other share at least one allocation site.
 * Context-insensitive sets are compared on alloc nodes only; wrapped sets
 * are unwrapped and re-dispatched.
 */
public boolean hasNonEmptyIntersection(PointsToSet other) {
  if (other instanceof AllocAndContextSet) {
    return nonEmptyHelper((AllocAndContextSet) other);
  }
  if (other instanceof WrappedPointsToSet) {
    // unwrap and retry with the underlying set
    return hasNonEmptyIntersection(((WrappedPointsToSet) other).getWrapped());
  }
  if (other instanceof PointsToSetInternal) {
    return ((PointsToSetInternal) other).forall(new P2SetVisitor() {
      @Override
      public void visit(Node n) {
        // skip the scan once a common alloc has been found
        if (returnValue) {
          return;
        }
        for (AllocAndContext allocAndContext : AllocAndContextSet.this) {
          if (n.equals(allocAndContext.alloc)) {
            returnValue = true;
            break;
          }
        }
      }
    });
  }
  throw new UnsupportedOperationException("can't check intersection with set of type " + other.getClass());
}
/**
 * Merges other into this set. After the merge, the old-set of this set is the
 * intersection of the two original old-sets, and everything else from both
 * sets is placed in the new-set. Other must be a DoublePointsToSet of the
 * same declared type (both types may be null, but not mismatched).
 */
public void mergeWith( PointsToSetInternal other ) {
    if( !( other instanceof DoublePointsToSet ) ) {
        throw new RuntimeException( "NYI" );
    }
    final DoublePointsToSet o = (DoublePointsToSet) other;
    // declared types must agree exactly: both equal, or both null
    if( other.type != null && !( other.type.equals( type ) ) ) {
        throw new RuntimeException( "different types "+type+" and "+other.type );
    }
    if( other.type == null && type != null ) {
        throw new RuntimeException( "different types "+type+" and "+other.type );
    }
    final PointsToSetInternal newNewSet = G.v().newSetFactory.newSet( type, pag );
    final PointsToSetInternal newOldSet = G.v().oldSetFactory.newSet( type, pag );
    // keep as "old" only the nodes that are old in BOTH sets
    oldSet.forall( new P2SetVisitor() {
        public final void visit( Node n ) {
            if( o.oldSet.contains( n ) ) newOldSet.add( n );
        }} );
    // everything not in the shared old part becomes "new"
    // (addAll with a non-null exclude set skips members of newOldSet)
    newNewSet.addAll( this, newOldSet );
    newNewSet.addAll( o, newOldSet );
    newSet = newNewSet;
    oldSet = newOldSet;
}
/**
 * Returns true iff the set contains n. Performs a binary search over
 * nodes[0..size), which is kept sorted by node number.
 */
public final boolean contains( Node n ) {
    int left = 0;
    int right = size;
    int hc = n.getNumber();
    while( left < right ) {
        // unsigned shift avoids int overflow for very large left+right
        int mid = (left + right) >>> 1;
        int midhc = nodes[mid].getNumber();
        if( midhc < hc ) {
            left = mid+1;
        } else if( midhc > hc ) {
            right = mid;
        } else {
            return true;
        }
    }
    return false;
}
/** * When the overflow list overflows, the maximum number of elements that may * remain in the overflow list (the rest are moved into the base bit vector) */ public boolean contains(Node n) { // Which should be checked first, bitVector or overflow? (for // performance) // I think the bit vector, since it only takes O(1) to check many // elements // Check the bit vector if (bitVector != null && bitVector.contains(n)) return true; // Check overflow if (overflow.contains(n)) return true; return false; }
public boolean forall(P2SetVisitor v) { // Iterate through the bit vector. Ripped from BitPointsToSet again. // It seems there should be a way to share code between BitPointsToSet // and // SharedHybridSet, but I don't know how at the moment. if (bitVector != null) { for (BitSetIterator it = bitVector.iterator(); it.hasNext();) { v.visit((Node) pag.getAllocNodeNumberer().get(it.next())); } } // Iterate through the overflow list for (OverflowList.ListNode i = overflow.overflow; i != null; i = i.next) { v.visit(i.elem); } return v.getReturnValue(); }
/**
 * Adds n to this set without a type-compatibility check. While small, the
 * set is a linear node array; once the array fills up it is converted to a
 * bit vector and the add proceeds on the bits.
 * Returns true iff n was not already present.
 */
protected final boolean fastAdd( Node n ) {
    if( bits == null ) {
        // array representation: find a free slot or an existing occurrence
        for (int i = 0; i < nodes.length; i++) {
            Node cur = nodes[i];
            if (cur == null) {
                empty = false;
                nodes[i] = n;
                return true;
            }
            if (cur == n) {
                return false;
            }
        }
        // array is full: switch representation, then fall through to the bits path
        convertToBits();
    }
    boolean added = bits.set( n.getNumber() );
    if( added ) {
        empty = false;
    }
    return added;
}
/**
 * Seeds this pointer with SPARK's context-insensitive points-to result,
 * mapping every valid geometric node to the shared stub interval manager.
 * Clears the pending new-points buffer afterwards.
 */
@Override
public void injectPts() {
    final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();
    me.getP2Set().forall(new P2SetVisitor() {
        @Override
        public void visit(Node n) {
            // objects outside the geometric encoding are dropped
            if (geomPTA.isValidGeometricNode(n)) {
                pt_objs.put((AllocNode) n, (HeapInsIntervalManager) stubManager);
            }
        }
    });
    new_pts = null;
}
/** * Collecting basic statistical information for SPARK. */ public void profileSparkBasicMetrics() { int n_legal_var = 0; int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 }; evalRes.pts_size_bar_spark = new Histogram(limits); for (IVarAbstraction pn : ptsProvider.pointers) { // We don't consider exception pointers Node var = pn.getWrappedNode(); if (ptsProvider.isExceptionPointer(var)) continue; ++n_legal_var; int size = var.getP2Set().size(); evalRes.pts_size_bar_spark.addNumber(size); evalRes.total_spark_pts += size; if (size > evalRes.max_pts_spark) evalRes.max_pts_spark = size; } evalRes.avg_spark_pts = (double) evalRes.total_spark_pts / n_legal_var; }
/** * Called after each round of collection. */ public void finish() { if ( readyToUse == false ) { // We flatten the list readyToUse = true; outList.clear(); if ( tableView.size() == 0 ) return; for ( Map.Entry<Node, List<VarType>> entry : tableView.entrySet() ) { List<VarType> resList = entry.getValue(); outList.addAll(resList); } } }
/** * Tests if two containers have contain same things. * Can be used to answer the alias query. */ public boolean hasNonEmptyIntersection(PtSensVisitor<VarType> other) { // Using table view for comparison, that's faster for ( Map.Entry<Node, List<VarType>> entry : tableView.entrySet() ) { Node var = entry.getKey(); List<VarType> list1 = entry.getValue(); List<VarType> list2 = other.getCSList(var); if ( list1.size() == 0 || list2.size() == 0 ) continue; for ( VarType cv1 : list1 ) { for ( VarType cv2 : list2 ) if ( cv1.intersect(cv2) ) return true; } } return false; }
/**
 * We transfer the SPARK results to current pointer if this pointer is not
 * involved in the geometric analysis. Note that the unreachable objects will
 * not be inserted.
 */
@Override
public void injectPts() {
    final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    pt_objs = new HashMap<AllocNode, GeometricManager>();
    me.getP2Set().forall(new P2SetVisitor() {
        @Override
        public void visit(Node n) {
            // only nodes valid in the geometric encoding are transferred
            if (geomPTA.isValidGeometricNode(n)) {
                pt_objs.put((AllocNode) n, (GeometricManager) stubManager);
            }
        }
    });
    new_pts = null;
}
/**
 * Seeds this pointer with SPARK's context-insensitive points-to result,
 * mapping every valid geometric node to the shared stub interval manager.
 * Clears the pending new-points buffer afterwards.
 */
@Override
public void injectPts() {
    final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    pt_objs = new HashMap<AllocNode, PtInsIntervalManager>();
    me.getP2Set().forall(new P2SetVisitor() {
        @Override
        public void visit(Node n) {
            // objects outside the geometric encoding are dropped
            if (geomPTA.isValidGeometricNode(n)) {
                pt_objs.put((AllocNode) n, (PtInsIntervalManager) stubManager);
            }
        }
    });
    new_pts = null;
}
/** * Compute the refined points-to results for specified pointers. * @param initVars */ public void addUserDefPts( Set<Node> initVars ) { for ( Node vn : initVars ) { IVarAbstraction pn = geomPTA.findInternalNode(vn); if ( pn == null ) { // I don't know where is this pointer continue; } pn = pn.getRepresentative(); if ( pn.reachable() ) { pn.willUpdate = true; } } }
public void releaseSparkMem() { for (int i = 0; i < n_var; ++i) { IVarAbstraction pn = int2var.get(i); // Keep only the points-to results for representatives if ( pn != pn.getRepresentative() ) { continue; } if ( pn.willUpdate ) { Node vn = pn.getWrappedNode(); vn.discardP2Set(); } } System.gc(); System.gc(); System.gc(); System.gc(); }
/**
 * For many applications, they only need the context insensitive points-to
 * result. We provide a way to transfer our result back to SPARK. After the
 * transformation, we discard the context sensitive points-to information.
 * Therefore, if context sensitive queries are needed in future, please call
 * ddSolve() for queried pointers first.
 */
public void transformToCIResult() {
    for (IVarAbstraction pn : pointers) {
        // only representatives own points-to results
        if (pn.getRepresentative() != pn) {
            continue;
        }
        Node node = pn.getWrappedNode();
        node.discardP2Set();
        PointsToSetInternal ptSet = node.makeP2Set();
        for (AllocNode obj : pn.get_all_points_to_objects()) {
            ptSet.add(obj);
        }
        // free the context-sensitive representation
        pn.deleteAll();
    }
    hasTransformed = true;
}
/**
 * Collects the allocation nodes of pts into a list, using a set to remove
 * duplicates first.
 *
 * Cleanup: replaced the manual iterator copy loop with the ArrayList copy
 * constructor.
 */
private List<AllocNode> getMayAliasList(PointsToSetInternal pts) {
    final Set<AllocNode> ret = new HashSet<AllocNode>();
    pts.forall(new P2SetVisitor() {
        public void visit(Node n) {
            ret.add((AllocNode) n);
        }
    });
    // single bulk copy instead of iterating element by element
    return new ArrayList<AllocNode>(ret);
}
/**
 * Returns true iff any array object that local may point to has been flagged
 * as shared between threads.
 */
private boolean isArrayAccessShared(Local local) {
    PointsToSetInternal pts = (PointsToSetInternal) pag.reachingObjects(local);
    return pts.forall(new P2SetVisitor() {
        boolean shared;
        @Override
        public void visit(Node n) {
            if (shared) {
                return; // already decided; skip further lookups
            }
            XNode xn = indexNodeMap.get(n.getNumber());
            if (xn != null) {
                shared = xn.isArrayShared();
            }
        }
        @Override
        public boolean getReturnValue() {
            return shared;
        }
    });
}
/**
 * Records an array access (read or write) by the current thread on every
 * abstract object local may point to, creating access records on demand.
 */
private void accessArray(Local local, final boolean isWrite) {
    PointsToSetInternal pts = (PointsToSetInternal) pag.reachingObjects(local);
    pts.forall(new P2SetVisitor() {
        @Override
        public void visit(Node n) {
            int id = n.getNumber();
            XNode xn = indexNodeMap.get(id);
            if (xn == null) {
                // first access to this abstract object: create its record
                xn = new XNode();
                indexNodeMap.put(id, xn);
            }
            xn.accessArray(currentThreadID, isWrite);
            // a thread that may run multiple times also conflicts with itself,
            // so record a second access under the next thread id
            if (currentThread.runsMany) {
                xn.accessArray((byte) (currentThreadID + 1), isWrite);
            }
        }
    });
}
/**
 * Records a field access (read or write) on field sf through every abstract
 * object that local may point to.
 */
private void accessField(Local local, final SootField sf, final boolean isWrite) {
    PointsToSetInternal pts = (PointsToSetInternal) pag.reachingObjects(local);
    pts.forall(new P2SetVisitor() {
        @Override
        public void visit(Node n) {
            int id = n.getNumber();
            XNode xn = indexNodeMap.get(id);
            if (xn == null) {
                // ensure a record exists for this abstract object
                xn = new XNode();
                indexNodeMap.put(id, xn);
            }
            // NOTE(review): unlike accessArray, the access is recorded through
            // the outer accessField(id, sf, isWrite) overload rather than on
            // xn directly; presumably that overload looks the record up by id
            // — confirm xn is only created here to guarantee the map entry.
            accessField(id, sf, isWrite);
        }
    });
}
public void dump(String filename) { PrintWriter pw = null; try { pw = new PrintWriter(new FileOutputStream(filename)); } catch (FileNotFoundException e) { e.printStackTrace(); } // pw.println("digraph G {\n\trankdir=LR;"); pw.println("digraph G {"); Predicate<Node> falsePred = new Predicate<Node>() { @Override public boolean test(Node obj_) { return false; } }; for (Node node : nodes) { pw.println(PagToDotDumper.makeDotNodeLabel(node, falsePred)); } for (String edge : edges) { pw.println(edge); } pw.println("}"); pw.close(); }
public boolean shouldHandle(VarNode dst) { // TODO Auto-generated method stub P2SetVisitor v = new P2SetVisitor() { @Override public void visit(Node n) { if (!returnValue) { returnValue = !manager.castNeverFails(n.getType(), type); } } }; dst.getP2Set().forall(v); return v.getReturnValue(); }
/**
 * Calls v's visit method on all nodes in this set and returns v's result.
 * Each set bit is an alloc-node number resolved through the PAG's numberer.
 */
public final boolean forall( P2SetVisitor v ) {
    BitSetIterator it = bits.iterator();
    while( it.hasNext() ) {
        v.visit( (Node) pag.getAllocNodeNumberer().get( it.next() ) );
    }
    return v.getReturnValue();
}
/**
 * Adds n to this set, returns true if n was not already in this set.
 * Nodes whose type can never be cast to this set's declared type are
 * filtered out.
 */
public final boolean add( Node n ) {
    if( !pag.getTypeManager().castNeverFails( n.getType(), type ) ) {
        return false;
    }
    return fastAdd( n );
}
/**
 * Returns true iff n occurs on this list. Uses reference comparison, since
 * nodes are canonical.
 */
public boolean contains(Node n) {
    ListNode cur = data;
    while (cur != null) {
        if (cur.elem == n) {
            return true;
        }
        cur = cur.next;
    }
    return false;
}
/**
 * Adds n to this shared list-based set.
 * @return true iff n was not already present
 */
public boolean add(Node n) {
    //Prevent repeated code by saying add() is just a union() with one element in the
    //set being merged in. add isn't called frequently anyway.
    //However, we have to call makeNode() to get the node, in case it was already there
    //and because union() assumes "other" is an existing node in another set. So we
    //create the node for the duration of the call to addOrAddAll(), after which we
    //delete it unless it was already there
    ListNode other = makeNode(n, null);
    // pin the temporary node so it survives the addOrAddAll call
    other.incRefCount();
    boolean added = addOrAddAll(data, other, null, null);
    other.decRefCount(); //undo its creation
    return added;
}
/**
 * Returns the canonical (hash-consed) list node for (elem, next), creating
 * and registering it in the global node table if no such node exists yet.
 */
private ListNode makeNode(Node elem, ListNode next) {
    // nodes are shared globally, keyed by their (element, tail) pair
    Pair p = new Pair(elem, next);
    ListNode retVal = (AllSharedListNodes.v().allNodes.get(p));
    if (retVal == null) //if it's not an existing node
    {
        retVal = new ListNode(elem, next);
        if (next != null)
            next.incRefCount(); //next now has an extra
        //thing pointing at it (the newly created node)
        AllSharedListNodes.v().allNodes.put(p, retVal);
    }
    return retVal;
}
/**
 * Adds n to this set, returns true if n was not already in this set.
 * The backing array nodes[0..size) is kept sorted by node number; a binary
 * search finds the insertion point, the array is grown if needed, and the
 * tail is shifted right by one.
 */
public final boolean add( Node n ) {
    // nodes whose type can never reach this set's declared type are rejected
    if( !pag.getTypeManager().castNeverFails( n.getType(), type ) ) {
        return false;
    }
    if( contains(n) ) return false;
    // binary search for the insertion point; since contains() was false the
    // loop terminates with left == right == insertion index
    int left = 0;
    int right = size;
    int hc = n.getNumber();
    while( left < right ) {
        // unsigned shift avoids int overflow for very large left+right
        int mid = (left + right) >>> 1;
        int midhc = nodes[mid].getNumber();
        if( midhc < hc ) {
            left = mid+1;
        } else if( midhc > hc ) {
            right = mid;
        } else break;
    }
    // allocate or grow the backing array in increments of 4
    if( nodes == null ) {
        nodes = new Node[size+4];
    } else if( size == nodes.length ) {
        Node[] newNodes = new Node[size+4];
        System.arraycopy( nodes, 0, newNodes, 0, nodes.length );
        nodes = newNodes;
    }
    // shift the tail right and insert n at its sorted position
    System.arraycopy( nodes, left, nodes, left+1, size-left );
    nodes[left] = n;
    size++;
    return true;
}
/**
 * Adds n to this shared hybrid set.
 * @return true iff n was not already present
 */
public boolean add(Node n) {
    /*
     * This algorithm is described in the paper "IBM Research Report: Fast
     * Pointer Analysis" by Hirzel, Dincklage, Diwan, and Hind, pg. 11
     */
    if (contains(n))
        return false;
    ++numElements;
    if (!overflow.full()) {
        // cheap path: small additions go to the overflow list
        overflow.add(n);
    } else {
        // Put everything in the bitvector
        PointsToBitVector newBitVector;
        if (bitVector == null)
            newBitVector = new PointsToBitVector(pag.getAllocNodeNumberer()
                    .size());
        else
            newBitVector = new PointsToBitVector(bitVector);
        newBitVector.add(n); // add n to it
        // fold the overflow list into the new bit vector
        add(newBitVector, overflow);
        // Now everything is in newBitVector, and it must have numElements ones.
        // The algorithm would still work without this step, but wouldn't be a
        // shared implementation at all: try to reuse an equal existing vector.
        findAppropriateBitVector(newBitVector, null, 0,
                numElements - overflow.size() - 1);
    }
    return true;
}
/**
 * Builds an overflow list containing every node whose bit is set in bv.
 * The iterator yields only the indices of 1 bits; each index is an
 * alloc-node number resolved through the PAG's numberer.
 */
public OverflowList(PointsToBitVector bv) {
    for (BitSetIterator it = bv.iterator(); it.hasNext();) {
        add((Node) pag.getAllocNodeNumberer().get(it.next()));
    }
}
/**
 * Prepends n to the overflow list.
 * @throws RuntimeException if the list is already at capacity
 */
public void add(Node n) {
    if (full()) {
        throw new RuntimeException(
                "Can't add an element to a full overflow list.");
    }
    overflow = new ListNode(n, overflow);
    ++overflowElements;
}
/**
 * Returns true iff n is on the overflow list. Uses reference comparison,
 * since nodes are canonical.
 */
public boolean contains(Node n) {
    ListNode cur = overflow;
    while (cur != null) {
        if (cur.elem == n) {
            return true;
        }
        cur = cur.next;
    }
    return false;
}
/**
 * Calls v's visit method on all nodes in this set and returns v's result.
 * Handles both representations: the packed node array (null marks the end)
 * and the bit vector (each set bit resolved through the numberer).
 */
public final boolean forall( P2SetVisitor v ) {
    if( bits != null ) {
        for( BitSetIterator it = bits.iterator(); it.hasNext(); ) {
            v.visit(pag.getAllocNodeNumberer().get( it.next() ) );
        }
        return v.getReturnValue();
    }
    for (Node node : nodes) {
        if (node == null) {
            break; // entries are packed at the front
        }
        v.visit(node);
    }
    return v.getReturnValue();
}
/**
 * Returns true iff the set contains n. In bit-vector form this is a direct
 * bit probe; in array form it is a linear scan where a null entry marks the
 * end of the packed elements.
 */
public final boolean contains( Node n ) {
    if( bits != null ) {
        return bits.get( n.getNumber() );
    }
    for (Node node : nodes) {
        if (node == n) {
            return true;
        }
        if (node == null) {
            break;
        }
    }
    return false;
}
protected final void convertToBits() { if( bits != null ) return; // ++numBitVectors; bits = new BitVector( pag.getAllocNodeNumberer().size() ); for (Node node : nodes) { if (node != null) { fastAdd(node); } } }