final private BitVector makeMaskOfInterface(SootClass interf) { if (!(interf.isInterface())) throw new RuntimeException(); BitVector ret = new BitVector(pag.getAllocNodeNumberer().size()); typeMask.put(interf.getType(), ret); Collection<SootClass> implementers = fh.getAllImplementersOfInterface(interf); for (SootClass impl : implementers) { BitVector other = (BitVector)typeMask.get(impl.getType()); if (other == null) other = makeClassTypeMask(impl); ret.or(other); } // I think, the following can be eliminated. It is added to make // type-masks exactly the same as the original type-masks if (implementers.size() == 0) { for (AllocNode an : anySubtypeAllocs) ret.set(an.getNumber()); } return ret; }
@Override
public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
  // Stage the (I1, I2, L) tuple in the shared scratch figure first.
  pres.I1 = I1;
  pres.I2 = I2;
  pres.L = L;
  // Classify the figure by which interval ends are unconstrained (zero).
  final int code;
  if (I1 == 0) {
    code = (I2 == 0) ? HeapInsIntervalManager.ALL_TO_ALL
                     : HeapInsIntervalManager.ALL_TO_MANY;
  } else {
    code = (I2 == 0) ? HeapInsIntervalManager.MANY_TO_ALL
                     : HeapInsIntervalManager.ONE_TO_ONE;
  }
  return addPointsTo(code, obj);
}
@Override
public int count_pts_intervals(AllocNode obj) {
  // Tally every segment stored for obj across all divisions.
  int total = 0;
  SegmentNode[] entries = find_points_to(obj);
  for (int div = 0; div < HeapInsIntervalManager.Divisions; ++div) {
    for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
  // Dump every (object, I1, I2, L) tuple held by this pointer.
  for (AllocNode obj : pt_objs.keySet()) {
    SegmentNode[] entries = find_points_to(obj);
    for (int div = 0; div < HeapInsIntervalManager.Divisions; ++div) {
      for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
        outPrintStream.println(
            "(" + obj.toString() + ", " + seg.I1 + ", " + seg.I2 + ", " + seg.L + ")");
      }
    }
  }
}
@Override public boolean pointer_interval_points_to(long l, long r, AllocNode obj) { SegmentNode[] int_entry = find_points_to(obj); if (int_entry == null) return false; // Check all-to-many figures if ( int_entry[HeapInsIntervalManager.ALL_TO_MANY] != null ) return true; for ( int i = 1; i < HeapInsIntervalManager.Divisions; ++i ) { SegmentNode p = int_entry[i]; while ( p != null ) { long R = p.I1 + p.L; if ( (l <= p.I1 && p.I1 < r) || ( p.I1 <= l && l < R) ) return true; p = p.next; } } return false; }
@Override
public void injectPts() {
  // Rebuild the points-to map from the SPARK result; each valid geometric
  // object is mapped to the shared stub manager.
  final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
  pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();
  me.getP2Set().forall(new P2SetVisitor() {
    @Override
    public void visit(Node n) {
      if (geomPTA.isValidGeometricNode(n)) {
        pt_objs.put((AllocNode) n, (HeapInsIntervalManager) stubManager);
      }
    }
  });
  // No delta set is maintained after injection.
  new_pts = null;
}
private boolean addPointsTo( int code, AllocNode obj ) { HeapInsIntervalManager im = pt_objs.get(obj); if ( im == null ) { im = new HeapInsIntervalManager(); pt_objs.put(obj, im); } else if ( im == deadManager ) { // We preclude the propagation of this object return false; } // pres has been filled properly before calling this method if ( im.addNewFigure(code, pres) != null ) { new_pts.put(obj, im); return true; } return false; }
@Override
public int count_pts_intervals(AllocNode obj) {
  // Tally every figure stored for obj across all geometric divisions.
  int total = 0;
  SegmentNode[] entries = find_points_to(obj);
  for (int div = 0; div < GeometricManager.Divisions; ++div) {
    for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
  // Dump every figure; rectangles additionally print their second length L'.
  for (AllocNode obj : pt_objs.keySet()) {
    SegmentNode[] entries = find_points_to(obj);
    for (int div = 0; div < GeometricManager.Divisions; ++div) {
      for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
        outPrintStream.print(
            "(" + obj.toString() + ", " + seg.I1 + ", " + seg.I2 + ", " + seg.L + ", ");
        if (seg instanceof RectangleNode) {
          outPrintStream.print(((RectangleNode) seg).L_prime + ", ");
        }
        outPrintStream.println(symbols[div] + ")");
      }
    }
  }
}
/**
 * We transfer the SPARK results to current pointer if this pointer is not
 * involved in the geometric analysis. Note that, the unreachable objects
 * will not be inserted.
 */
@Override
public void injectPts() {
  final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
  // Rebuild the points-to map; every valid geometric object is mapped to
  // the shared stub manager.
  pt_objs = new HashMap<AllocNode, GeometricManager>();
  me.getP2Set().forall(new P2SetVisitor() {
    @Override
    public void visit(Node n) {
      if (geomPTA.isValidGeometricNode(n)) {
        pt_objs.put((AllocNode) n, (GeometricManager) stubManager);
      }
    }
  });
  // No delta set is maintained after injection.
  new_pts = null;
}
/**
 * Tests whether this pointer may reference {@code obj} under any context
 * in the interval [l, r).
 *
 * @param l   inclusive lower bound of the queried context interval
 * @param r   exclusive upper bound of the queried context interval
 * @param obj the allocation node queried
 * @return true if any stored figure for obj overlaps [l, r)
 */
@Override
public boolean pointer_interval_points_to(long l, long r, AllocNode obj) {
  SegmentNode[] int_entry = find_points_to(obj);
  // Guard against objects this pointer does not reference at all; the
  // HeapIns variant of this method performs the same null check.
  if (int_entry == null)
    return false;
  for (int i = 0; i < GeometricManager.Divisions; ++i) {
    SegmentNode p = int_entry[i];
    while (p != null) {
      long R = p.I1 + p.L;
      // Overlap test between query [l, r) and segment [p.I1, R).
      if ((l <= p.I1 && p.I1 < r) || (p.I1 <= l && l < R))
        return true;
      p = p.next;
    }
  }
  return false;
}
/** * A non-interface public function. * It adds the points-to tuple to the geometric manager. */ private boolean addPointsTo(int code, AllocNode obj) { GeometricManager gm = pt_objs.get(obj); if ( gm == null ) { gm = new GeometricManager(); pt_objs.put(obj, gm); } else if ( gm == deadManager ) { // We preclude the propagation of this object return false; } SegmentNode p = gm.addNewFigure( code, pres ); if ( p != null ) { new_pts.put(obj, gm); return true; } return false; }
/**
 * Implements the pointer assignment inference rules.
 * The pts and pe are the points-to tuple and flow edge;
 * pres is the computed result.
 * code indicates the types of the pts and pe: the high byte encodes the
 * points-to figure's shape, the low byte the flow edge's shape.
 *
 * Return value is used to indicate the type of the result.
 */
private static boolean reasonAndPropagate( FullSensitiveNode qn, AllocNode obj, SegmentNode pts, SegmentNode pe, int code )
{
  int ret_type = GeometricManager.Undefined_Mapping;

  // Dispatch on the high byte: the shape of the points-to figure.
  switch ( code >> 8 ) {
  case GeometricManager.ONE_TO_ONE:
    // points-to is a 1-1 mapping; low byte (code & 255) is the edge's shape
    ret_type = infer_pts_is_one_to_one(pts, pe, code & 255 );
    break;

  case GeometricManager.MANY_TO_MANY:
    // points-to is a many-many mapping, stored as a rectangle
    ret_type = infer_pts_is_many_to_many((RectangleNode)pts, pe, code & 255 );
    break;
  }

  // Propagate only when the inference produced a defined mapping type.
  if (ret_type != GeometricManager.Undefined_Mapping)
    return qn.addPointsTo( ret_type, obj );

  return false;
}
@Override
public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
  // Stage the (I1, I2, L) tuple in the shared scratch figure first.
  pres.I1 = I1;
  pres.I2 = I2;
  pres.L = L;
  // Classify the figure by which interval ends are unconstrained (zero).
  final int code;
  if (I1 == 0) {
    code = (I2 == 0) ? PtInsIntervalManager.ALL_TO_ALL
                     : PtInsIntervalManager.ALL_TO_MANY;
  } else {
    code = (I2 == 0) ? PtInsIntervalManager.MANY_TO_ALL
                     : PtInsIntervalManager.ONE_TO_ONE;
  }
  return addPointsTo(code, obj);
}
@Override
public int count_pts_intervals(AllocNode obj) {
  // Tally every interval stored for obj across all divisions.
  int total = 0;
  SegmentNode[] entries = find_points_to(obj);
  for (int div = 0; div < PtInsIntervalManager.Divisions; ++div) {
    for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
  // Dump every (object, I1, I2, L) tuple; skip objects with no figures.
  for (AllocNode obj : pt_objs.keySet()) {
    SegmentNode[] entries = find_points_to(obj);
    if (entries == null)
      continue;
    for (int div = 0; div < PtInsIntervalManager.Divisions; ++div) {
      for (SegmentNode seg = entries[div]; seg != null; seg = seg.next) {
        outPrintStream.println(
            "(" + obj.toString() + ", " + seg.I1 + ", " + seg.I2 + ", " + seg.L + ")");
      }
    }
  }
}
@Override public boolean pointer_interval_points_to(long l, long r, AllocNode obj) { SegmentNode[] int_entry = find_points_to(obj); // Check all-to-many figures if ( int_entry[PtInsIntervalManager.ALL_TO_MANY] != null ) return true; for ( int i = 1; i < HeapInsIntervalManager.Divisions; ++i ) { SegmentNode p = int_entry[i]; while ( p != null ) { long R = p.I1 + p.L; if ( (l <= p.I1 && p.I1 < r) || ( p.I1 <= l && l < R) ) return true; p = p.next; } } return false; }
@Override
public void injectPts() {
  // Rebuild the points-to map from the SPARK result; each valid geometric
  // object is mapped to the shared stub manager.
  final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
  pt_objs = new HashMap<AllocNode, PtInsIntervalManager>();
  me.getP2Set().forall(new P2SetVisitor() {
    @Override
    public void visit(Node n) {
      if (geomPTA.isValidGeometricNode(n)) {
        pt_objs.put((AllocNode) n, (PtInsIntervalManager) stubManager);
      }
    }
  });
  // No delta set is maintained after injection.
  new_pts = null;
}
private boolean addPointsTo( int code, AllocNode obj ) { PtInsIntervalManager im = pt_objs.get(obj); if ( im == null ) { im = new PtInsIntervalManager(); pt_objs.put(obj, im); } else if ( im == deadManager ) { // We preclude the propagation of this object return false; } // pres has been filled properly before calling this method if ( im.addNewFigure(code, pres) != null ) { new_pts.put(obj, im); return true; } return false; }
/** * Obtain the set of possible call targets at given @param callsite. */ private void getCallTargets(IVarAbstraction pn, SootMethod src, Stmt callsite, ChunkedQueue<SootMethod> targetsQueue) { InstanceInvokeExpr iie = (InstanceInvokeExpr)callsite.getInvokeExpr(); Local receiver = (Local)iie.getBase(); NumberedString subSig = iie.getMethodRef().getSubSignature(); // We first build the set of possible call targets for (AllocNode an : pn.get_all_points_to_objects()) { Type type = an.getType(); if (type == null) continue; VirtualCalls.v().resolve(type, receiver.getType(), subSig, src, targetsQueue); } }
/**
 * For many applications, they only need the context insensitive points-to result.
 * We provide a way to transfer our result back to SPARK.
 * After the transformation, we discard the context sensitive points-to information.
 * Therefore, if context sensitive queries are needed in future, please call ddSolve() for queried pointers first.
 */
public void transformToCIResult() {
  for (IVarAbstraction pn : pointers) {
    // Only representative pointers own a points-to set.
    if (pn.getRepresentative() != pn)
      continue;
    Node sparkNode = pn.getWrappedNode();
    sparkNode.discardP2Set();
    PointsToSetInternal ciSet = sparkNode.makeP2Set();
    for (AllocNode obj : pn.get_all_points_to_objects()) {
      ciSet.add(obj);
    }
    // Free the context sensitive representation for this pointer.
    pn.deleteAll();
  }
  hasTransformed = true;
}
/** * Obtain or create an internal representation of an object field. */ public IVarAbstraction findAndInsertInstanceField(AllocNode obj, SparkField field) { AllocDotField af = findAllocDotField(obj, field); IVarAbstraction pn = null; if ( af == null ) { // We create a new instance field node w.r.t type compatiblity Type decType = ((SootField) field).getDeclaringClass().getType(); Type baseType = obj.getType(); // baseType must be a sub type of decType if ( typeManager.castNeverFails(baseType, decType) ) { af = makeAllocDotField(obj, field); pn = makeInternalNode(af); pointers.add(pn); } } else { pn = consG.get(af); } return pn; }
/**
 * Flattens a points-to set into a list of its distinct allocation nodes
 * (the may-alias candidates).
 *
 * @param pts the SPARK points-to set to flatten
 * @return a fresh list of the distinct allocation nodes in {@code pts}
 */
private List<AllocNode> getMayAliasList(PointsToSetInternal pts) {
  final HashSet<AllocNode> ret = new HashSet<AllocNode>();
  pts.forall(new P2SetVisitor() {
    public void visit(Node n) {
      ret.add((AllocNode) n);
    }
  });
  // Copy the deduplicated set into a list in one step instead of the
  // original's manual iterator loop (behavior-identical, idiomatic).
  return new ArrayList<AllocNode>(ret);
}
/**
 * Collects all allocation sites whose allocated type is an instance of the
 * given module class.
 *
 * @param module the module (class) whose instances are of interest
 * @return the allocation nodes that may create instances of the module
 */
public static Collection<AllocNode> getModuleAllocationSites(SootClass module) {
  Collection<AllocNode> allocSites = new LinkedList<AllocNode>();
  PointsToAnalysis pta = Scene.v().getPointsToAnalysis();
  // This helper only works with a SPARK pointer assignment graph.
  assert pta instanceof PAG;
  PAG pag = (PAG) pta;
  // Use the typed iterator instead of the raw Iterator + cast the
  // original used.
  for (Iterator<AllocNode> it = pag.getAllocNodeNumberer().iterator(); it.hasNext();) {
    AllocNode an = it.next();
    if (isModuleInstance(module.getType(), an.getType()))
      allocSites.add(an);
  }
  return allocSites;
}
public static boolean isStringNode(VarNode node) {
  // Only global variables can wrap the special STRING_NODE or a string constant.
  if (!(node instanceof GlobalVarNode))
    return false;
  Object variable = ((GlobalVarNode) node).getVariable();
  if (!(variable instanceof AllocNode))
    return false;
  AllocNode alloc = (AllocNode) variable;
  return alloc.getNewExpr() == PointsToAnalysis.STRING_NODE
      || alloc instanceof StringConstantNode;
}
/**
 * compute a flows-to set for an allocation site. for now, we use a simple
 * refinement strategy; just refine as much as possible, maintaining the
 * smallest set of flows-to vars
 *
 * @param alloc the allocation site to compute the flows-to set for
 * @param heuristic which field-check heuristic to refine with
 * @return the smallest flows-to set found across all passes, or null if
 *         no pass produced a result
 */
protected Set<VarNode> computeFlowsTo(AllocNode alloc, HeuristicType heuristic) {
  this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristic, pag
      .getTypeManager(), getMaxPasses());
  numPasses = 0;
  Set<VarNode> smallest = null;
  while (true) {
    numPasses++;
    // Debug hook: stop after a fixed pass count when DEBUG_PASS is set.
    if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
      return smallest;
    }
    // Hard cap on the number of refinement passes.
    if (numPasses > maxPasses) {
      return smallest;
    }
    if (DEBUG) {
      G.v().out.println("PASS " + numPasses);
      G.v().out.println(fieldCheckHeuristic);
    }
    clearState();
    Set<VarNode> result = null;
    try {
      result = getFlowsToHelper(new AllocAndContext(alloc, EMPTY_CALLSTACK));
    } catch (TerminateEarlyException e) {
      // Deliberate best-effort: the pass blew its budget; treat it as
      // having produced no result and try the next refinement.
    }
    if (result != null) {
      // Keep the smallest flows-to set seen across all passes.
      if (smallest == null || result.size() < smallest.size()) {
        smallest = result;
      }
    }
    // Stop when the heuristic has no further refinement to offer.
    if (!fieldCheckHeuristic.runNewPass()) {
      return smallest;
    }
  }
}
public Set<ClassConstant> possibleClassConstants() {
  Set<ClassConstant> constants = new HashSet<ClassConstant>();
  for (AllocAndContext allocAndContext : this) {
    AllocNode n = allocAndContext.alloc;
    // A single non-constant object makes the answer unknown.
    if (!(n instanceof ClassConstantNode))
      return null;
    constants.add(((ClassConstantNode) n).getClassConstant());
  }
  return constants;
}
public Set<String> possibleStringConstants() {
  Set<String> constants = new HashSet<String>();
  for (AllocAndContext allocAndContext : this) {
    AllocNode n = allocAndContext.alloc;
    // A single non-constant object makes the answer unknown.
    if (!(n instanceof StringConstantNode))
      return null;
    constants.add(((StringConstantNode) n).getString());
  }
  return constants;
}
/**
 * Returns the type mask for the given type: a bit vector with one bit per
 * allocation node, set iff that node's type can be cast to {@code type}.
 * Before answering, folds any allocation nodes created since the masks
 * were built into every compatible mask.
 */
final public BitVector get( Type type ) {
  if( type == null ) return null;
  // Catch up on allocation nodes added after makeTypeMask() ran.
  while(allocNodeListener.hasNext()) {
    AllocNode n = allocNodeListener.next();
    for( final Type t : Scene.v().getTypeNumberer()) {
      if( !(t instanceof RefLikeType) ) continue;
      if( t instanceof AnySubType ) continue;
      if( isUnresolved(t) ) continue;
      if( castNeverFails( n.getType(), t ) ) {
        BitVector mask = typeMask.get( t );
        if( mask == null ) {
          // First time a mask is needed for t: build it from scratch over
          // all allocation nodes (this already includes n).
          typeMask.put( t, mask = new BitVector() );
          for( final AllocNode an : pag.getAllocNodeNumberer()) {
            if( castNeverFails( an.getType(), t ) ) {
              mask.set( an.getNumber() );
            }
          }
          continue;
        }
        mask.set( n.getNumber() );
      }
    }
  }
  BitVector ret = (BitVector) typeMask.get( type );
  // NOTE(review): the else below binds to the INNER if (dangling else):
  // a missing mask for a non-phantom type throws, while a missing mask
  // with fh == null falls through and returns null.
  if( ret == null && fh != null )
    // If we have a phantom class and have no type mask, we assume that
    // it is not cast-compatible to anything
    if (type instanceof RefType && ((RefType) type).getSootClass().isPhantom())
      return new BitVector();
    else
      throw new RuntimeException( "Type mask not found for type "+type );
  return ret;
}
/**
 * Builds the type masks for all resolvable reference-like types: for each
 * type, a bit vector over the allocation nodes whose allocated type is
 * cast-compatible with it. Classes are seeded via the hierarchy walk from
 * java.lang.Object; interfaces are unioned from their implementers; the
 * remaining types are scanned directly.
 */
final public void makeTypeMask() {
  // Force resolution of java.lang.Class before enumerating types.
  RefType.v( "java.lang.Class" );
  typeMask = new LargeNumberedMap<Type, BitVector>( Scene.v().getTypeNumberer() );
  if( fh == null ) return;

  int numTypes = Scene.v().getTypeNumberer().size();
  if( pag.getOpts().verbose() )
    G.v().out.println( "Total types: "+numTypes );
  // **
  // Index allocation sites by class, then seed class masks by walking the
  // hierarchy down from java.lang.Object.
  initClass2allocs();
  makeClassTypeMask(Scene.v().getSootClass("java.lang.Object"));
  // **
  ArrayNumberer<AllocNode> allocNodes = pag.getAllocNodeNumberer();
  for( Type t : Scene.v().getTypeNumberer()) {
    if( !(t instanceof RefLikeType) ) continue;
    if( t instanceof AnySubType ) continue;
    if( isUnresolved(t) ) continue;
    // **
    // Interfaces (other than the special root-like types) get their mask
    // from the union of their implementers; classes were handled above.
    if (t instanceof RefType && !t.equals(RefType.v("java.lang.Object"))
        && !t.equals(RefType.v("java.io.Serializable"))
        && !t.equals(RefType.v("java.lang.Cloneable"))) {
      SootClass sc = ((RefType)t).getSootClass();
      if (sc.isInterface()) {
        makeMaskOfInterface(sc);
      }
      continue;
    }
    // **
    // Remaining types (arrays and the special roots): scan every
    // allocation node directly.
    BitVector mask = new BitVector( allocNodes.size() );
    for( Node n : allocNodes) {
      if( castNeverFails( n.getType(), t ) ) {
        mask.set( n.getNumber() );
      }
    }
    typeMask.put( t, mask );
  }
  // Listen for allocation nodes created after this point; get() folds
  // them in lazily.
  allocNodeListener = pag.allocNodeListener();
}
@Override
public IVarAbstraction generateNode(Node vNode) {
  // Allocation and field-reference nodes only need a placeholder; variable
  // nodes get the full heap-insensitive representation.
  if (vNode instanceof AllocNode || vNode instanceof FieldRefNode) {
    return new DummyNode(vNode);
  }
  return new HeapInsNode(vNode);
}
@Override
public void reconstruct() {
  // Reset all per-pointer state ahead of a fresh round of propagation.
  flowto = new HashMap<HeapInsNode, HeapInsIntervalManager>();
  pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();
  new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();
  complex_cons = null;
  lrf_value = 0;
}
/**
 * Remember to clean the is_new flag
 */
@Override
public void do_after_propagation() {
  // Commit the freshly discovered figures, then start an empty delta set.
  for (HeapInsIntervalManager mgr : new_pts.values()) {
    mgr.flush();
  }
  new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();
}
/**
 * Query if this pointer and qv could point to the same object under any contexts
 */
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
  HeapInsNode other = (HeapInsNode) qv;
  for (AllocNode an : pt_objs.keySet()) {
    // Constant objects are shared by construction and never witness a
    // meaningful alias.
    if (an instanceof ClassConstantNode || an instanceof StringConstantNode)
      continue;
    SegmentNode[] qt = other.find_points_to(an);
    if (qt == null)
      continue;
    SegmentNode[] pt = find_points_to(an);
    // Test every figure of this pointer against every figure of qv.
    for (int i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
      for (SegmentNode p = pt[i]; p != null; p = p.next) {
        for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
          for (SegmentNode q = qt[j]; q != null; q = q.next) {
            if (quick_intersecting_test(p, q))
              return true;
          }
        }
      }
    }
  }
  return false;
}
@Override public Set<AllocNode> get_all_points_to_objects() { // If this pointer is not a representative pointer if ( parent != this ) return getRepresentative().get_all_points_to_objects(); return pt_objs.keySet(); }
@Override
public IVarAbstraction generateNode(Node vNode) {
  // Allocation and field-reference nodes only need a placeholder; variable
  // nodes get the full context-sensitive representation.
  if (vNode instanceof AllocNode || vNode instanceof FieldRefNode) {
    return new DummyNode(vNode);
  }
  return new FullSensitiveNode(vNode);
}
@Override
public void reconstruct() {
  // Reset all per-pointer state ahead of a fresh round of propagation.
  flowto = new HashMap<FullSensitiveNode, GeometricManager>();
  pt_objs = new HashMap<AllocNode, GeometricManager>();
  new_pts = new HashMap<AllocNode, GeometricManager>();
  complex_cons = null;
  lrf_value = 0;
}
@Override
public void do_after_propagation() {
  // Commit the freshly discovered figures, then start an empty delta set.
  if (!new_pts.isEmpty()) {
    for (GeometricManager mgr : new_pts.values()) {
      mgr.flush();
    }
  }
  new_pts = new HashMap<AllocNode, GeometricManager>();
}
@Override
public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
  // Stage the (I1, I2, L) tuple in the scratch rectangle, then insert it
  // as a one-to-one figure.
  pres.I1 = I1;
  pres.I2 = I2;
  pres.L = L;
  return addPointsTo(GeometricManager.ONE_TO_ONE, obj);
}