@Test public void progressInBoundsAndMonotonicallyIncreasing() { final String[] content = {}; entityBag.add(new MockEntity("A_1", content)); entityBag.add(new MockEntity("A_2", content)); entityBag.add(new MockEntity("B", content)); entityBag.add(new MockEntity("C", content)); entityBag.add(new MockEntity("D", content)); final MutableDouble lastProgress = new MutableDouble(-Double.MAX_VALUE); final AtomicReference<String> lastOperation = new AtomicReference<String>(""); entityBag.addProgressListener((progress, operation) -> { assertThat(progress, is(lessThanOrEqualTo(1d))); if (!StringUtils.equals(operation, lastOperation.get())) { lastOperation.set(operation); lastProgress.setValue(progress); return; } assertThat(progress, greaterThanOrEqualTo(lastProgress.getValue())); lastProgress.setValue(progress); }); entityBag.mergeSimilars(); }
private void checkAndCreateDemand(long layerId, long demandId) { if (!accum_avgDemandOfferedTraffic.get(layerId).containsKey(demandId)) { /* Initialize demand information */ accum_avgDemandOfferedTraffic.get(layerId).put(demandId, new MutableDouble()); minDemandOfferedTraffic.get(layerId).put(demandId, new MutableDouble(Double.MAX_VALUE)); maxDemandOfferedTraffic.get(layerId).put(demandId, new MutableDouble()); accum_avgDemandCarriedTraffic.get(layerId).put(demandId, new MutableDouble()); minDemandCarriedTraffic.get(layerId).put(demandId, new MutableDouble(Double.MAX_VALUE)); maxDemandCarriedTraffic.get(layerId).put(demandId, new MutableDouble()); accum_avgDemandBlockedTraffic.get(layerId).put(demandId, new MutableDouble()); minDemandBlockedTraffic.get(layerId).put(demandId, new MutableDouble(Double.MAX_VALUE)); maxDemandBlockedTraffic.get(layerId).put(demandId, new MutableDouble()); accum_avgExcessCarriedTraffic.get(layerId).put(demandId, new MutableDouble()); minDemandExcessCarriedTraffic.get(layerId).put(demandId, new MutableDouble(Double.MAX_VALUE)); maxDemandExcessCarriedTraffic.get(layerId).put(demandId, new MutableDouble()); accum_demandAvailabilityClassic.get(layerId).put(demandId, new MutableDouble()); accum_demandAvailabilityWeighted.get(layerId).put(demandId, new MutableDouble()); excessDemandCarriedTrafficTime.get(layerId).put(demandId, new MutableDouble()); demandTotalTime.get(layerId).put(demandId, new MutableDouble()); } }
private void checkAndCreateLink(long layerId, long linkId) { if (!accum_avgLinkLengthInKm.get(layerId).containsKey(linkId)) { /* Initialize link information */ accum_avgLinkLengthInKm.get(layerId).put(linkId, new MutableDouble()); minLinkLengthInKm.get(layerId).put(linkId, new MutableDouble(Double.MAX_VALUE)); maxLinkLengthInKm.get(layerId).put(linkId, new MutableDouble()); accum_avgCapacity.get(layerId).put(linkId, new MutableDouble()); minCapacity.get(layerId).put(linkId, new MutableDouble(Double.MAX_VALUE)); maxCapacity.get(layerId).put(linkId, new MutableDouble()); accum_avgLinkOccupiedCapacity.get(layerId).put(linkId, new MutableDouble()); minLinkOccupiedCapacity.get(layerId).put(linkId, new MutableDouble(Double.MAX_VALUE)); maxLinkOccupiedCapacity.get(layerId).put(linkId, new MutableDouble()); accum_avgUtilization.get(layerId).put(linkId, new MutableDouble()); minUtilization.get(layerId).put(linkId, new MutableDouble(Double.MAX_VALUE)); maxUtilization.get(layerId).put(linkId, new MutableDouble()); accum_avgOversubscribedCapacity.get(layerId).put(linkId, new MutableDouble()); minOversubscribedCapacity.get(layerId).put(linkId, new MutableDouble(Double.MAX_VALUE)); maxOversubscribedCapacity.get(layerId).put(linkId, new MutableDouble()); accum_linkOversubscribedTime.get(layerId).put(linkId, new MutableDouble()); accum_linkUpTime.get(layerId).put(linkId, new MutableDouble()); accum_linkTotalTime.get(layerId).put(linkId, new MutableDouble()); } }
private void checkAndCreateNode(long layerId, long nodeId) { if (!accum_avgNodeInDegree.get(layerId).containsKey(nodeId)) { accum_avgNodeInDegree.get(layerId).put(nodeId, new MutableDouble()); minNodeInDegree.get(layerId).put(nodeId, Integer.MAX_VALUE); maxNodeInDegree.get(layerId).put(nodeId, 0); accum_avgNodeOutDegree.get(layerId).put(nodeId, new MutableDouble()); minNodeOutDegree.get(layerId).put(nodeId, Integer.MAX_VALUE); maxNodeOutDegree.get(layerId).put(nodeId, 0); accum_avgNodeIngressTraffic.get(layerId).put(nodeId, new MutableDouble()); maxNodeIngressTraffic.get(layerId).put(nodeId, 0.0); minNodeIngressTraffic.get(layerId).put(nodeId, Double.MAX_VALUE); accum_avgNodeEgressTraffic.get(layerId).put(nodeId, new MutableDouble()); maxNodeEgressTraffic.get(layerId).put(nodeId, 0.0); minNodeEgressTraffic.get(layerId).put(nodeId, Double.MAX_VALUE); } }
@Test public void SumTest() { SumInt si = new SumInt(); SumLong sl = new SumLong(); SumFloat sf = new SumFloat(); SumDouble sd = new SumDouble(); Assert.assertEquals(new MutableInt(10), si.accumulate(si.defaultAccumulatedValue(), 10)); Assert.assertEquals(new MutableInt(11), si.accumulate(new MutableInt(1), 10)); Assert.assertEquals(new MutableInt(22), si.merge(new MutableInt(1), new MutableInt(21))); Assert.assertEquals(new MutableLong(10L), sl.accumulate(sl.defaultAccumulatedValue(), 10L)); Assert.assertEquals(new MutableLong(22L), sl.accumulate(new MutableLong(2L), 20L)); Assert.assertEquals(new MutableLong(41L), sl.merge(new MutableLong(32L), new MutableLong(9L))); Assert.assertEquals(new MutableFloat(9.0F), sf.accumulate(sf.defaultAccumulatedValue(), 9.0F)); Assert.assertEquals(new MutableFloat(22.5F), sf.accumulate(new MutableFloat(2.5F), 20F)); Assert.assertEquals(new MutableFloat(41.0F), sf.merge(new MutableFloat(33.1F), new MutableFloat(7.9F))); Assert.assertEquals(new MutableDouble(9.0), sd.accumulate(sd.defaultAccumulatedValue(), 9.0)); Assert.assertEquals(new MutableDouble(22.5), sd.accumulate(new MutableDouble(2.5), 20.0)); Assert.assertEquals(new MutableDouble(41.0), sd.merge(new MutableDouble(33.1), new MutableDouble(7.9))); }
/**
 * Do gradient descent on the weights of each basis
 *
 * @param observation
 * @param fisher
 * @param model
 */
@Override
public void addObservation(GeographicalObservation<Double> observation, Fisher fisher, FishState model) {
    // get x and y
    double[] x = extractObservation(observation.getTile(), observation.getTime(), fisher, model);
    double y = observation.getValue();
    // now get prediction
    double prediction = predict(x);
    // gradient descent!
    double increment = learningRate * (y - prediction);
    for (Pair<RBFBasis, MutableDouble> basis : network) {
        // todo: you can make this faster by storing the evaluate from the predict call!
        basis.getSecond().setValue(basis.getSecond().doubleValue() + increment * basis.getFirst().evaluate(x));
    }
}
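For reference, the update performed by the loop above is the standard delta rule for a model that is linear in its weights; writing the network output as in the predict method further below, the code computes

    \hat{y} = \sum_j w_j \,\phi_j(x), \qquad w_i \leftarrow w_i + \eta\,(y - \hat{y})\,\phi_i(x)

where \eta is learningRate, \phi_i is the i-th RBFBasis, and w_i is the MutableDouble paired with it. This only restates what the code already does.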
protected static double computeTotalUsingIterator(final ChronicleMap<LongValue, PortfolioAssetInterface> cache, int start, int end) { if (end > start) { final PortfolioAssetInterface asset = Values.newHeapInstance(PortfolioAssetInterface.class); PortfolioValueAccumulator accumulator = new PortfolioValueAccumulator(new MutableDouble(), asset); for (int s = start; s < end; s++) { try (MapSegmentContext<LongValue, PortfolioAssetInterface, ?> context = cache.segmentContext(s)) { context.forEachSegmentEntry(accumulator); } } return accumulator.total.doubleValue(); } return 0; }
private Pair<Integer, Double> findBestClusterToMerge(int origCluster, int minCluster, int maxCluster, ContextCounts clusterContextCounts) {
    MutableDouble bestScore = new MutableDouble(-Double.MAX_VALUE);
    MutableInt bestCluster = new MutableInt(-1);
    Utils.fasterParallelStream(clusterContextCounts.getAllClusters()).forEach(cluster -> {
        if (cluster >= minCluster && cluster < maxCluster && cluster != origCluster) {
            double score = computeMergeScore(origCluster, 0.0, cluster, clusterContextCounts);
            if (score > bestScore.doubleValue()) {
                synchronized (bestScore) {
                    if (score > bestScore.doubleValue()) { // bestScore might have changed while acquiring lock
                        bestScore.setValue(score);
                        bestCluster.setValue(cluster);
                    }
                }
            }
        }
    });
    return new Pair<>(bestCluster.intValue(), bestScore.doubleValue());
}
public static StopModel searchJams(TrafficJamModel[] jams, String lineID, int direction, MutableInt jammedStopIndex, MutableDouble jammedStopDelay, String[] jammedStopTitle) { for(TrafficJamModel jam : jams) { for(int i = 0 ; i < jam.slowVehicles.length; i++) { if(jam.slowVehicles[i].lineID.equals(lineID)) { if(jam.slowVehicles[i].direction == direction){ jammedStopIndex.setValue(i); jammedStopDelay.setValue(jam.cumulativeDelays[i]); jammedStopTitle[0] = jam.vehicleStops[i].name; return jam.vehicleStops[i]; } } } } return null; }
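A minimal calling sketch for searchJams, showing how the MutableInt/MutableDouble out-parameters are read back. The helper name reportJam is made up, and the sketch assumes it lives in the same class as searchJams:

static void reportJam(TrafficJamModel[] jams, String lineID, int direction) {
    // out-parameters are pre-initialized and filled in by searchJams when a match is found
    MutableInt jammedStopIndex = new MutableInt(-1);
    MutableDouble jammedStopDelay = new MutableDouble(0.0);
    String[] jammedStopTitle = new String[1];
    StopModel jammedStop = searchJams(jams, lineID, direction, jammedStopIndex, jammedStopDelay, jammedStopTitle);
    if (jammedStop != null) {
        System.out.println(jammedStopTitle[0] + ": delay " + jammedStopDelay.doubleValue()
                + " at stop index " + jammedStopIndex.intValue());
    }
}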
private void helpGradByFiniteDiffs(Algebra tmpS) { Tensor t1 = new Tensor(s, 4,4); Identity<Tensor> id1 = new Identity<Tensor>(t1); Identity<Tensor> temp = new Identity<Tensor>(Tensor.getScalarTensor(s, 2)); SoftmaxMbrDepParse ea = new SoftmaxMbrDepParse(id1, temp, tmpS); int numParams = ModuleFn.getOutputSize(ea.getInputs()); IntDoubleDenseVector x = ModuleTestUtils.getAbsZeroOneGaussian(numParams); final MutableDouble sum = new MutableDouble(0); x.iterate(new FnIntDoubleToVoid() { public void call(int idx, double val) { sum.add(val); } }); x.scale(-1.0/sum.doubleValue()); ModuleTestUtils.assertGradientCorrectByFd(ea, x, 1e-8, 1e-5); }
protected void setAngledSprite(SpriteList sprite) { MutableDouble angle = new MutableDouble(Math.toDegrees(Math.atan2(velY, velX))); Map<BodyPart, SpriteRelation> angledSprite = new HashMap<BodyPart, SpriteRelation>(); angledSprite.put(BodyPart.MAIN, new AngledSpriteRelation(new Sprite(sprite), this, angle, 0, 0, this.bounds.getWidth(), this.bounds.getHeight(), this.bounds.posX(), this.bounds.posY())); setAdditionalSprites(angledSprite); }
@Test public void progressMonotonicallyGrows() { MutableDouble lastProgress = new MutableDouble(-Double.MAX_VALUE); AtomicReference<String> lastOperation = new AtomicReference<String>(""); mergeManager.addProgressListener((progress, operation) -> { if (!lastOperation.get().equals(operation)) lastOperation.set(operation); else assertThat(progress, greaterThanOrEqualTo(lastProgress.getValue())); lastProgress.setValue(progress); }); mergeManager.execute(); }
private void checkAndCreateNode(long nodeId) { if (!accum_nodeUpTime.containsKey(nodeId)) { /* Initialize node information */ accum_nodeUpTime.put(nodeId, new MutableDouble()); accum_nodeTotalTime.put(nodeId, new MutableDouble()); } }
private void subtractPoint(final Integer index, final MutableDouble value) { data.compute( index, (i, v) -> { v.subtract(value); return v; }); }
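Note that ConcurrentHashMap.compute invokes the lambda with v == null when the key is absent, so the method above throws a NullPointerException for indices that are not already present. A null-safe sketch, under the assumption that a missing entry should be treated as zero (the helper name is made up):

private void subtractPointSafe(final Integer index, final MutableDouble value) {
    data.compute(index, (i, v) -> {
        if (v == null)
            return new MutableDouble(-value.doubleValue()); // assumed semantics: 0 - value
        v.subtract(value);
        return v;
    });
}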
/** * @param rivString : A string representation of a RIV, generally obtained by calling RIV.toString(). * @return a MapRIV */ public static MapRIV fromString(final String rivString) { String[] pointStrings = rivString.split(" "); final int last = pointStrings.length - 1; final int size = Integer.parseInt(pointStrings[last]); pointStrings = Arrays.copyOf(pointStrings, last); final ConcurrentHashMap<Integer, MutableDouble> elts = new ConcurrentHashMap<>(); for (final String s : pointStrings) { final String[] elt = s.split("\\|"); if (elt.length != 2) throw new IndexOutOfBoundsException("Wrong number of partitions: " + s); else elts.put(Integer.parseInt(elt[0]), new MutableDouble(Double.parseDouble(elt[1]))); } return new MapRIV(elts, size).destructiveRemoveZeros(); }
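An illustrative call showing the text format parsed above: space-separated index|value tokens with the vector size as the final token (the numbers here are made-up example values):

// keys 3 and 7 with values 1.5 and -2.0, dimensionality 16000 (example values only)
MapRIV riv = MapRIV.fromString("3|1.5 7|-2.0 16000");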
/**
 * Group the points with similar Geohash at specified level
 *
 * @param points
 * @param level Geohash level
 * @return
 */
private Map<String, MutableDouble> mapByGeoHash(List<GeoPoint> points, int level) {
    if (level < 1 || points == null)
        throw new IllegalArgumentException();
    Map<String, MutableDouble> hashPoints = new HashMap<String, MutableDouble>();
    for (GeoPoint point : points) {
        try {
            // Calculate geo hash
            String hash = GeohashUtils.encodeLatLon(point.getLatitude(), point.getLongitude());
            // Cut to geo hash level
            hash = hash.substring(0, level);
            // Group values
            MutableDouble val = hashPoints.get(hash);
            if (val == null) {
                hashPoints.put(hash, new MutableDouble(point.getValue()));
            } else {
                val.add(point.getValue());
            }
        } catch (Exception e) {
            log.error(e);
        }
    }
    return hashPoints;
}
private void updateNearestPoint( @Nonnull Body body, @Nonnull Point2D point, @Nonnull Mutable<Point2D> nearestPoint, @Nonnull MutableDouble distanceToNearestPoint) { double distanceToPoint = body.getDistanceTo(point); if (distanceToPoint >= epsilon && (nearestPoint.get() == null || distanceToPoint < distanceToNearestPoint.doubleValue())) { nearestPoint.set(point); distanceToNearestPoint.setValue(distanceToPoint); } }
private static void updateFarthestPoint( @Nonnull Body body, @Nonnull Point2D point, @Nonnull Mutable<Point2D> farthestPoint, @Nonnull MutableDouble distanceToFarthestPoint, double startAngle, double finishAngle) { double distanceToPoint = body.getDistanceTo(point); if (GeometryUtil.isAngleBetween(new Vector2D(body.getPosition(), point).getAngle(), startAngle, finishAngle) && (farthestPoint.get() == null || distanceToPoint > distanceToFarthestPoint.doubleValue())) { farthestPoint.set(point); distanceToFarthestPoint.setValue(distanceToPoint); } }
/** * standard RBF network prediction: the weighted sum of the basis functions, each a radial function of the observation's distance from its center */ private double predict(double[] observation) { double sum = 0; for(Pair<RBFBasis,MutableDouble> basis : network) { sum += basis.getSecond().doubleValue() * basis.getFirst().evaluate(observation); } return sum; }
/**
 * Interpolate gradient.
 *
 * @param gradx
 *            the gradx
 * @param grady
 *            the grady
 * @param px
 *            the px
 * @param py
 *            the py
 * @param width
 *            the width
 * @param gx
 *            the gx
 * @param gy
 *            the gy
 */
/*
 * Interpolate the gradient of the gradient images gradx and grady with width
 * width at the point (px,py) using linear interpolation, and return the result
 * in (gx,gy).
 */
private void interpolate_gradient(float[] gradx, float[] grady, double px, double py, int width, MutableDouble gx, MutableDouble gy) {
    int gix, giy, gpos;
    double gfx, gfy, gx1, gy1, gx2, gy2, gx3, gy3, gx4, gy4;
    gix = (int) Math.floor(px);
    giy = (int) Math.floor(py);
    gfx = px % 1.0;
    gfy = py % 1.0;
    gpos = LinesUtil.LINCOOR(gix, giy, width);
    gx1 = gradx[gpos];
    gy1 = grady[gpos];
    gpos = LinesUtil.LINCOOR(gix + 1, giy, width);
    gx2 = gradx[gpos];
    gy2 = grady[gpos];
    gpos = LinesUtil.LINCOOR(gix, giy + 1, width);
    gx3 = gradx[gpos];
    gy3 = grady[gpos];
    gpos = LinesUtil.LINCOOR(gix + 1, giy + 1, width);
    gx4 = gradx[gpos];
    gy4 = grady[gpos];
    gx.setValue((1 - gfy) * ((1 - gfx) * gx1 + gfx * gx2) + gfy * ((1 - gfx) * gx3 + gfx * gx4));
    gy.setValue((1 - gfy) * ((1 - gfx) * gy1 + gfx * gy2) + gfy * ((1 - gfx) * gy3 + gfx * gy4));
}
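The two setValue calls above are plain bilinear interpolation between the four gradient samples surrounding (px, py). With f_x = px \bmod 1, f_y = py \bmod 1, and g_1, g_2, g_3, g_4 the samples at (gix, giy), (gix+1, giy), (gix, giy+1), (gix+1, giy+1), the interpolated value (computed separately for gradx and grady) is

    g(p_x, p_y) = (1 - f_y)\,\bigl[(1 - f_x)\,g_1 + f_x\,g_2\bigr] + f_y\,\bigl[(1 - f_x)\,g_3 + f_x\,g_4\bigr]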
public MapRIV(final ConcurrentHashMap<Integer, MutableDouble> points, final int size) { this(size); points.forEach((i, v) -> _addPoint(i, v)); }
public MapRIV(final int size) { data = new ConcurrentHashMap<Integer, MutableDouble>(); this.size = size; }
public MapRIV(final int[] keys, final double[] vals, final int size) { this(size); final int l = keys.length; if (l != vals.length) throw new SizeMismatchException("Different quantity keys than values!"); for (int i = 0; i < l; i++) data.put(keys[i], new MutableDouble(vals[i])); }
public MapRIV destructiveSub(final MapRIV other) throws SizeMismatchException { other.data.forEach((BiConsumer<Integer, MutableDouble>) this::subtractPoint); return this; }
public double getOrDefault(final int index, final double otherVal) { final MutableDouble v = data.get(index); if (null == v) return otherVal; return v.getValue(); }
@Override public double put(final int index, final double value) { MutableDouble d = data.put(index, new MutableDouble(value)); if (null == d) return 0; else return d.getValue(); }
@Override public MutableDouble defaultAccumulatedValue() { return new MutableDouble(0.0); }
@Override public MutableDouble accumulate(MutableDouble accumulatedValue, Double input) { accumulatedValue.add(input); return accumulatedValue; }
@Override public MutableDouble merge(MutableDouble accumulatedValue1, MutableDouble accumulatedValue2) { accumulatedValue1.add(accumulatedValue2); return accumulatedValue1; }
@Override public Double getOutput(MutableDouble accumulatedValue) { return accumulatedValue.doubleValue(); }
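A short usage sketch of the four accumulator methods above; the class name SumDouble is taken from the SumTest snippet earlier, and the values are arbitrary:

SumDouble sd = new SumDouble();
MutableDouble acc = sd.defaultAccumulatedValue();  // starts at 0.0
acc = sd.accumulate(acc, 2.5);                     // 2.5
acc = sd.merge(acc, new MutableDouble(1.5));       // 4.0 (merges a second partial sum)
Double result = sd.getOutput(acc);                 // 4.0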
public RBFNetworkRegression(ObservationExtractor[] extractors, int order, double[] min, double[] max, double learningRate, double initialWeight) {
    Preconditions.checkArgument(max.length == min.length);
    Preconditions.checkArgument(max.length == extractors.length);
    this.extractors = extractors;
    this.learningRate = learningRate;
    // check each step
    double[] step = new double[max.length];
    for (int i = 0; i < step.length; i++) {
        step[i] = (max[i] - min[i]) / (double) (order - 1);
    }
    double initialBandwidth = 2 * (Arrays.stream(step).max().getAsDouble());
    this.network = new LinkedList<>();
    // builds combinations without recursion
    // taken mostly from here: http://stackoverflow.com/a/29910788/975904
    int totalDimension = (int) Math.pow(order, extractors.length);
    for (int i = 0; i < totalDimension; i++) {
        double[] indices = new double[min.length];
        for (int j = 0; j < extractors.length; j++) {
            // how often we need to reset
            int period = (int) Math.pow(order, extractors.length - j - 1);
            int index = i / period % order;
            indices[j] = min[j] + step[j] * index;
        }
        System.out.println(Arrays.toString(indices));
        network.add(new Pair<>(new RBFBasis(initialBandwidth, Arrays.copyOf(indices, indices.length)),
                new MutableDouble(initialWeight)));
    }
}
/**
 * Line corrections.
 *
 * @param sigma
 *            the Gaussian smoothing sigma
 * @param w_est
 *            the extracted line width
 * @param r_est
 *            the extracted gradient ratio
 * @param w
 *            output: the corrected line width
 * @param h
 *            output: the line asymmetry
 * @param correct
 *            output: the line position correction
 * @param w_strong
 *            output: the line width on the strong side
 * @param w_weak
 *            output: the line width on the weak side
 * @return true if no valid correction could be computed (w_est or r_est outside
 *         the table range, or the required table entries are invalid)
 */
/*
 * Return the correct line width w and asymmetry h, and a line position
 * correction correct for a line with extracted width w_est and extracted
 * gradient ratio r_est for a given sigma. Furthermore, return the line width on
 * the weak and strong side of the line. These values are obtained by bilinear
 * interpolation from the table ctable.
 */
static boolean line_corrections(double sigma, double w_est, double r_est, MutableDouble w, MutableDouble h, MutableDouble correct, MutableDouble w_strong, MutableDouble w_weak) {
    int i_we, i_re;
    boolean is_valid;
    double a, b;
    w_est = w_est / sigma;
    if (w_est < 2 || w_est > 6 || r_est < 0 || r_est > 1) {
        w.setValue(0);
        h.setValue(0);
        correct.setValue(0);
        w_strong.setValue(0);
        w_weak.setValue(0);
        return true;
    }
    i_we = (int) Math.floor((w_est - 2) * 10);
    i_re = (int) Math.floor(r_est * 20);
    if (i_we == 40)
        i_we = 39;
    if (i_re == 20)
        i_re = 19;
    is_valid = getCTable(i_re, i_we).is_valid && getCTable(i_re, (i_we + 1)).is_valid
            && getCTable((i_re + 1), i_we).is_valid && getCTable((i_re + 1), (i_we + 1)).is_valid;
    a = (w_est - 2) * 10 - i_we;
    b = r_est * 20 - i_re;
    w.setValue(BILINEAR(a, b, i_re, i_we, 0) * sigma);
    h.setValue(BILINEAR(a, b, i_re, i_we, 1));
    correct.setValue(BILINEAR(a, b, i_re, i_we, 2) * sigma);
    w_strong.setValue(BILINEAR(a, b, i_re, i_we, 3) * sigma);
    w_weak.setValue(BILINEAR(a, b, i_re, i_we, 4) * sigma);
    return !is_valid;
}
/** * Closest point. * * @param lx * the lx * @param ly * the ly * @param dx * the dx * @param dy * the dy * @param px * the px * @param py * the py * @param cx * the cx * @param cy * the cy * @param t * the t */ /* * Calculate the closest point to (px,py) on the line (lx,ly) + t*(dx,dy) and * return the result in (cx,cy), plus the parameter in t. */ private void closest_point(double lx, double ly, double dx, double dy, double px, double py, MutableDouble cx, MutableDouble cy, MutableDouble t) { double mx, my, den, nom, tt; mx = px - lx; my = py - ly; den = dx * dx + dy * dy; nom = mx * dx + my * dy; if (den != 0) tt = nom / den; else tt = 0; cx.setValue(lx + tt * dx); cy.setValue(ly + tt * dy); t.setValue(tt); }
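Geometrically, the method above is the orthogonal projection of p = (px, py) onto the line l + t\,d with l = (lx, ly) and d = (dx, dy); in vector form

    t = \frac{(p - l) \cdot d}{d \cdot d}, \qquad c = l + t\,d

with t set to 0 when d \cdot d = 0 (degenerate direction), exactly as the den != 0 check does.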
public PortfolioValueAccumulator(MutableDouble total, PortfolioAssetInterface asset) { this.total = total; this.asset = asset; }
/** * Implementation of SpriteRelation that renders the sprite on an angle. <p> Renders sprites at the angle specified * in this.angle * * @param sprite the actual sprite instance that gets rendered * @param owner the entity this SpriteRelation is tied to * @param angle a MutableDouble holding the angle, in degrees, that this is rendered at. The purpose of a * MutableDouble is to allow this to be modified both within this class and within the owner. Should * be between -90 and +90, with 0 being horizontal. Sprites are flipped when facing left so this * should not be accounted for outside of this class. * @param mountX the x-offset of this sprite from owner, in tiles * @param mountY the y-offset of this sprite from owner, in tiles * @param sizeX the horizontal size of this sprite, in tiles * @param sizeY the vertical size of this sprite, in tiles * @param pivotX the x-offset of the pivot point from the mounting point, in tiles * @param pivotY the y-offset of the pivot point from the mounting point, in tiles */ public AngledSpriteRelation(Sprite sprite, Entity owner, MutableDouble angle, float mountX, float mountY, float sizeX, float sizeY, float pivotX, float pivotY) { super(sprite, owner, mountX, mountY, sizeX, sizeY); this.angle = angle; this.pivotX = pivotX; this.pivotY = pivotY; }
/**
 * Solve the linear equation a*x+b=0 and return the result in t and the number
 * of solutions in num.
 *
 * @param a
 *            the a
 * @param b
 *            the b
 * @param t
 *            the t
 * @param num
 *            the num
 */
public void solve_linear(double a, double b, MutableDouble t, MutableInt num) {
    if (a == 0.0) {
        // num.setValue(0);
        return;
    } else {
        num.setValue(1);
        t.setValue(-b / a);
        return;
    }
}
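A brief usage sketch (the numbers are made up, and the call assumes access to the enclosing instance). Note that the a == 0 branch above returns without modifying num (the setValue(0) call is commented out), so callers should initialize num before the call:

MutableDouble t = new MutableDouble(0.0);
MutableInt num = new MutableInt(0); // pre-initialized: left untouched when a == 0
solve_linear(2.0, 4.0, t, num);     // solve 2x + 4 = 0
// num.intValue() == 1, t.doubleValue() == -2.0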
/** * Getter for property 'network'. * * @return Value for property 'network'. */ public LinkedList<Pair<RBFBasis, MutableDouble>> getNetwork() { return network; }