public RealMatrix nextWishart(double df, Cholesky invscale) {
    int d = invscale.getL().getColumnDimension();
    Array2DRowRealMatrix A = new Array2DRowRealMatrix(d, d);
    ArrayRealVector v = new ArrayRealVector(d);
    for (int i = 0; i < d; i++) {
        // Bartlett decomposition: chi-squared draws on the diagonal,
        // standard normals below it, zeros above.
        v.setEntry(i, sqrt(nextChiSquared(df - i)));
        for (int j = 0; j < i; j++) {
            v.setEntry(j, 0.0);
        }
        for (int j = i + 1; j < d; j++) {
            v.setEntry(j, nextGaussian());
        }
        A.setColumnVector(i, invscale.solveLT(v));
    }
    return A.multiply(A.transpose());
}
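/*
 * Usage sketch (hedged): `Cholesky` is this project's own wrapper, assumed to
 * hold a lower-triangular factor of the *inverse* scale matrix, as getL() and
 * solveLT() above imply. The sampler instance `rng` and the `Cholesky`
 * construction below are illustrative assumptions, not the project's actual API.
 */
Cholesky invScale = new Cholesky(MatrixUtils.createRealIdentityMatrix(3)); // hypothetical ctor
RealMatrix sample = rng.nextWishart(5.0, invScale);
// `sample` is symmetric positive definite; with an identity scale matrix its
// expectation is df * I.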
/**
 * Vector deserialization.
 */
public static RealVector unmarshall(byte[] bytes, boolean sparse, int dimensions) throws IOException {
    RealVector realVector = !sparse
            ? new ArrayRealVector(dimensions)
            : new OpenMapRealVector(dimensions);
    if (!sparse) {
        // dense: a fixed-length run of doubles
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            for (int i = 0; i < dimensions; i++) {
                realVector.setEntry(i, dis.readDouble());
            }
        }
    } else {
        // sparse: (index, value) pairs until the stream is exhausted
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            while (true) {
                try {
                    realVector.setEntry(dis.readInt(), dis.readDouble());
                } catch (EOFException e) {
                    break;
                }
            }
        }
    }
    return realVector;
}
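/*
 * A minimal counterpart sketch for the wire format unmarshall() expects: dense
 * vectors as a run of doubles, sparse vectors as (int index, double value)
 * pairs. This helper is an assumption for illustration, not part of the
 * original API.
 */
public static byte[] marshall(RealVector v, boolean sparse) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(bos)) {
        if (!sparse) {
            for (int i = 0; i < v.getDimension(); i++) {
                dos.writeDouble(v.getEntry(i));
            }
        } else {
            for (int i = 0; i < v.getDimension(); i++) {
                double e = v.getEntry(i);
                if (e != 0.0) {
                    dos.writeInt(i);
                    dos.writeDouble(e);
                }
            }
        }
    }
    return bos.toByteArray();
}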
public MixModel(MethyModel tumor, MethyModel normal, RealVector thetas, int nBetas, int MYTHREADS)
        throws InterruptedException {
    int nFeatures = tumor.getNaRatio().getDimension();
    this.nBetas = nBetas;
    RealVector betas = new ArrayRealVector(nBetas);
    for (int i = 0; i < nBetas; i++) {
        betas.setEntry(i, i / (nBetas - 1.0));
    }
    mixDens = new RealMatrix[nFeatures];
    ExecutorService executor = Executors.newFixedThreadPool(MYTHREADS);
    for (int i = 0; i < nFeatures; i++) {
        double tumorAlpha = tumor.getAlpha().getEntry(i);
        double tumorBeta = tumor.getBeta().getEntry(i);
        BetaDistribution tumorDist = new BetaDistribution(tumorAlpha, tumorBeta);
        double normalAlpha = normal.getAlpha().getEntry(i);
        double normalBeta = normal.getBeta().getEntry(i);
        BetaDistribution normalDist = new BetaDistribution(normalAlpha, normalBeta);
        Runnable worker = new CalMixDens(tumorDist, normalDist, thetas, betas, nPoints, i, mixDens);
        executor.execute(worker);
    }
    executor.shutdown();
    // block until all workers finish instead of polling in a sleep loop
    executor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
}
/**
 * Computes the leave-one-out cross-validation error of LWR for a given tau.
 *
 * @param tau bandwidth parameter of the weighting kernel
 * @param x   input observations
 * @param y   target observations
 * @return the summed squared prediction error, or null if prediction fails
 */
private Double CrossValidationError(Double tau, ArrayRealVector x, ArrayRealVector y) {
    int n = x.getDimension();
    double totalError = 0.0;
    for (int i = 0; i < n; ++i) { // CV fold that leaves out point i
        double x_i = x.getEntry(i);
        double y_i = y.getEntry(i);
        ArrayRealVector Xcv = new ArrayRealVector(
                (ArrayRealVector) x.getSubVector(0, i), x.getSubVector(i + 1, n - (i + 1)));
        ArrayRealVector Ycv = new ArrayRealVector(
                (ArrayRealVector) y.getSubVector(0, i), y.getSubVector(i + 1, n - (i + 1)));
        Double y_predicted = LWRPredict(Xcv, Ycv, x_i, tau);
        if (null == y_predicted) {
            log.error(" cp LWR cannot predict - returning NULL");
            return null;
        }
        double predictionError = y_predicted - y_i;
        totalError += predictionError * predictionError;
    }
    return totalError;
}
/**
 * Locally weighted regression (LWR) prediction at a query point x0.
 *
 * @param X   input observations
 * @param Y   target observations
 * @param x0  query point
 * @param tau bandwidth parameter of the weighting kernel
 * @return the prediction, or null when the weighted normal equation degenerates
 */
public Double LWRPredict(ArrayRealVector X, ArrayRealVector Y, double x0, final double tau) {
    ArrayRealVector X0 = new ArrayRealVector(X.getDimension(), x0);
    ArrayRealVector delta = X.subtract(X0);
    ArrayRealVector sqDists = delta.ebeMultiply(delta);
    UnivariateFunction expTau = new UnivariateFunction() {
        @Override
        public double value(double arg0) {
            return Math.exp(-arg0 / (2 * tau));
        }
    };
    ArrayRealVector W = sqDists.map(expTau);
    double Xt_W_X = X.dotProduct(W.ebeMultiply(X));
    if (Xt_W_X == 0.0) {
        log.error(" cp LWR cannot predict - 0 denominator returning NULL");
        log.error("Xcv is " + X.toString());
        log.error("Ycv is " + Y.toString());
        log.error("x0 is " + x0);
        return null; // callers must be prepared for a null result
    }
    double theta = (1.0 / Xt_W_X) * X.ebeMultiply(W).dotProduct(Y);
    return theta * x0;
}
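/*
 * Sketch of bandwidth selection using the leave-one-out error above
 * (the candidate tau grid and the query point 0.5 are illustrative):
 */
double bestTau = Double.NaN;
double bestErr = Double.POSITIVE_INFINITY;
for (double tau : new double[] { 0.1, 0.3, 1.0, 3.0, 10.0 }) {
    Double err = CrossValidationError(tau, x, y);
    if (err != null && err < bestErr) {
        bestErr = err;
        bestTau = tau;
    }
}
Double yHat = LWRPredict(x, y, 0.5, bestTau);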
/**
 * NOTE: subtle conversion that caused a bug - we use predictions
 * starting next time-step, but forecastCapacities are assumed
 * to be from current - so to return data to the broker we need to
 * offset the record by 1
 */
protected ArrayRealVector convertEnergyProfileFromServerToBroker(
        CapacityProfile predictedEnergyProfile, int recordLength) throws Exception {
    int profileLength = predictedEnergyProfile.NUM_TIMESLOTS;
    // verify that profileLength divides recordLength
    boolean divides = (recordLength / profileLength * profileLength) == recordLength;
    if (!divides) {
        throw new Exception("profileLength=" + profileLength
                + " does not divide recordLength=" + recordLength);
    }
    ArrayRealVector result = new ArrayRealVector(recordLength);
    for (int i = 0; i < recordLength; ++i) {
        // the (i + 1) offset shifts the server's "current" slot to the broker's "next" slot
        result.setEntry(i, predictedEnergyProfile.getCapacity((i + 1) % profileLength));
    }
    return result;
}
@Override
public double[] smooth(double[] sourceX, double[] noisyY, double[] estimateX, double parameter) {
    int numDivisions = (int) Math.round(parameter);
    SmoothingHelper.SmoothingInput filteredInput = SmoothingHelper.filterInvalidValues(sourceX, noisyY);
    sourceX = filteredInput.getSourceX();
    noisyY = filteredInput.getSourceY();
    if (numDivisions >= sourceX.length) {
        throw new IllegalArgumentException("Cannot fit with " + numDivisions
                + " knots as the input data (after removing NAs) has < " + (numDivisions + 1)
                + " data points.");
    }
    // least-squares spline fit: solve for basis coefficients on the source grid
    double[][] sourceBasis = createBasis(numDivisions, splineDegree, sourceX);
    RealMatrix matrix = new Array2DRowRealMatrix(sourceBasis);
    QRDecomposition decomposition = new QRDecomposition(matrix);
    RealVector coefficients = decomposition.getSolver().solve(new ArrayRealVector(noisyY));
    // evaluate the fitted spline on the estimation grid, reusing the same
    // numDivisions so both bases have matching dimensions
    double[][] estimateBasis = createBasis(numDivisions, splineDegree, estimateX);
    RealMatrix estimateBasisMatrix = new Array2DRowRealMatrix(estimateBasis);
    return estimateBasisMatrix.transpose().preMultiply(coefficients).toArray();
}
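/*
 * Usage sketch (hedged): `smoother` stands in for an instance of this class;
 * splineDegree is one of its fields and the data below is illustrative.
 */
double[] xs = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
double[] ys = { 0.1, 1.1, 1.8, 3.2, 3.9, 5.1, 6.2, 6.8, 8.1, 9.0 };
double[] grid = { 0.5, 2.5, 4.5, 6.5, 8.5 };
double[] smoothed = smoother.smooth(xs, ys, grid, 3 /* spline divisions */);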
@Override
public ArrayRealVector getPredictionForAbout7Days(CustomerInfo customerInfo,
        boolean customerPerspective, int currentTimeslot, boolean fixed) {
    // portfolioManager returns predictions from the broker's
    // perspective (producer has kwh > 0, consumer has kwh < 0),
    // so to view it from the customer's perspective we multiply by -1
    int sign = customerPerspective ? -1 : 1;
    // TODO this is a temporary placeholder - ideally this class
    // won't need the portfolioManager to get its prediction
    RealVector energy = portfolioManager
            .getGeneralRawUsageForCustomer(customerInfo, fixed).mapMultiply(sign);
    // sanity check
    if (energy.getDimension() != 7 * 24) {
        log.error("Expecting energy dimension to be 7 * 24 - unexpected behavior might happen");
    }
    // rotate to start from current time
    return BrokerUtils.rotateWeeklyRecordAndAppendTillEndOfDay(energy, currentTimeslot);
}
@Override
public TreeMap<Double, TariffSpecification> optimizeTariffs(
        HashMap<TariffSpecification, HashMap<CustomerInfo, Integer>> tariffSubscriptions,
        HashMap<CustomerInfo, ArrayRealVector> customer2estimatedEnergy,
        List<TariffSpecification> competingTariffs, MarketManager marketManager,
        ContextManager contextManager, CostCurvesPredictor costCurvesPredictor,
        int currentTimeslot, Broker me) {
    // seed will be the best fixed-rate tariff
    TreeMap<Double, TariffSpecification> sortedTariffs =
            tariffOptimizerFixedRate.optimizeTariffs(tariffSubscriptions,
                    customer2estimatedEnergy, competingTariffs, marketManager,
                    contextManager, costCurvesPredictor, currentTimeslot, me);
    TariffSpecification fixedRateSeed = extractBestTariffSpec(sortedTariffs);
    TotalEnergyRecords energyRecords =
            sumTotalEnergy(customer2estimatedEnergy, tariffSubscriptions, currentTimeslot);
    ArrayRealVector energyUnitCosts = computeEnergyUnitCosts(costCurvesPredictor,
            energyRecords.getMyCustomersEnergy(),
            energyRecords.getCompetitorCustomersEnergy(), currentTimeslot);
    double avgMargin = computeAvgMargin(energyUnitCosts, fixedRateSeed);
    TariffSpecification touSpec =
            createTOUFixedMargin(energyUnitCosts, avgMargin, currentTimeslot, me);
    // create a result map with a single tariff
    TreeMap<Double, TariffSpecification> eval2spec = new TreeMap<Double, TariffSpecification>();
    eval2spec.put(0.0, touSpec);
    return eval2spec;
}
/**
 * Returns the (positive) average margin between the selling unit-price and the
 * buying unit-cost.
 *
 * @param energyUnitCosts predicted per-timeslot energy unit costs (signed, i.e. negative)
 * @param fixedRateSeed the fixed-rate tariff whose first rate is used as the selling price
 * @return the average margin
 */
private double computeAvgMargin(ArrayRealVector energyUnitCosts,
        TariffSpecification fixedRateSeed) {
    double totalMargin = 0;
    // '-' adjusts the sign to the broker's perspective (i.e. makes it positive)
    double sellingPricePerKwh = -(fixedRateSeed.getRates().get(0).getValue());
    for (int i = 0; i < energyUnitCosts.getDimension(); ++i) {
        double buyingPricePerKwh = energyUnitCosts.getEntry(i);
        // '+' since buyingPricePerKwh is signed (i.e. negative)
        double margin = sellingPricePerKwh + buyingPricePerKwh;
        totalMargin += margin;
        log.debug("computeAvgMargin(): sellingPricePerKwh=" + sellingPricePerKwh
                + " buyingPricePerKwh=" + buyingPricePerKwh + " margin=" + margin
                + " totalMargin=" + totalMargin);
    }
    double avgMargin = totalMargin / energyUnitCosts.getDimension();
    log.debug("avgMargin=" + avgMargin);
    return avgMargin;
}
private TariffSpecification createTOUFixedMargin(ArrayRealVector energyUnitCosts,
        double avgMargin, int currentTimeslot, Broker me) {
    int currentHourOfDay = (currentTimeslot - 360) % 24;
    TariffSpecification spec = new TariffSpecification(me, PowerType.CONSUMPTION);
    int firstHourOfPrediction = currentHourOfDay + 1;
    for (int i = 0; i < NUM_RATES; ++i) {
        // note the signs: energyUnitCosts holds signed (negative) costs and
        // avgMargin is positive, so subtracting the margin lowers the rate further
        double rateValue = energyUnitCosts.getEntry(i) - avgMargin;
        int hour = (firstHourOfPrediction + i) % 24;
        log.debug("adding rate, hour=" + hour + " rate=" + rateValue);
        Rate rate = new Rate().withValue(rateValue).withDailyBegin(hour).withDailyEnd(hour);
        spec.addRate(rate);
    }
    return spec;
}
/** {@inheritDoc} */
@Override
protected PointValuePair doOptimize() {
    final double[] lowerBound = getLowerBound();
    final double[] upperBound = getUpperBound();
    // Validity checks.
    setup(lowerBound, upperBound);
    isMinimize = (getGoalType() == GoalType.MINIMIZE);
    currentBest = new ArrayRealVector(getStartPoint());
    final double value = bobyqa(lowerBound, upperBound);
    return new PointValuePair(currentBest.getDataRef(), isMinimize ? value : -value);
}
@Override
public Pair<RealVector, RealMatrix> value(RealVector point) {
    double[] pointArray = point.toArray();
    double[] resultPoint = new double[this.distances.length];
    // residual for each beacon i: ||point - position_i||^2 - distance_i^2
    for (int i = 0; i < resultPoint.length; i++) {
        resultPoint[i] = 0.0;
        for (int j = 0; j < pointArray.length; j++) {
            resultPoint[i] += (pointArray[j] - this.getPositions()[i][j])
                    * (pointArray[j] - this.getPositions()[i][j]);
        }
        resultPoint[i] -= (this.getDistances()[i]) * (this.getDistances()[i]);
    }
    RealMatrix jacobian = jacobian(point);
    return new Pair<RealVector, RealMatrix>(new ArrayRealVector(resultPoint), jacobian);
}
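/*
 * Usage sketch with Commons Math's least-squares API, assuming this class
 * implements MultivariateJacobianFunction (which the Pair-returning value()
 * above matches) and that `function` was constructed from the beacon positions
 * and measured distances. Since value() already returns residuals, the target
 * is the zero vector.
 */
LeastSquaresProblem problem = new LeastSquaresBuilder()
        .start(new double[] { 0.0, 0.0 })                    // initial guess (illustrative)
        .model(function)
        .target(new double[function.getDistances().length])  // zeros: residuals should vanish
        .maxEvaluations(1000)
        .maxIterations(1000)
        .build();
LeastSquaresOptimizer.Optimum optimum = new LevenbergMarquardtOptimizer().optimize(problem);
RealVector estimatedPosition = optimum.getPoint();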
/**
 * @param forwardPath  lattice of the forward path
 * @param backwardPath lattice of the backward path
 * @param nBestList    array of lists (of string type) with the n-best list for each entity span
 * @return best forward + best backward path
 */
public String bestMergedPath(double[][] forwardPath, double[][] backwardPath,
        List<String>[] nBestList) {
    int length = forwardPath.length;
    for (int i = 0; i < forwardPath.length; i++) {
        for (int j = 0; j < nBestList[i].size(); j++) {
            forwardPath[i][j] += backwardPath[length - 1 - i][j];
        }
    }
    StringBuilder bestPath = new StringBuilder();
    for (int i = 0; i < forwardPath.length; i++) {
        RealVector realVector = new ArrayRealVector(forwardPath[i]);
        int bestPathIndex = realVector.getMaxIndex();
        bestPath.append(nBestList[i].get(bestPathIndex));
        bestPath.append(CANDIDATE_DELIMITER);
    }
    return bestPath.toString();
}
@Override
public void update(@Nonnull final Object arg, @Nonnull final double[] outScores)
        throws HiveException {
    ArrayRealVector x = parseX(arg);
    // [Stage#1] Outlier Detection
    xRing.add(x).toArray(xSeries, false /* LIFO */);
    int k1 = xRing.size() - 1;
    RealVector x_hat = sdar1.update(xSeries, k1);
    double scoreX = (k1 == 0) ? 0.d : loss(x, x_hat, lossFunc1);
    // smoothing
    double y = ChangeFinderUDF.smoothing(outlierScores.add(scoreX));
    // [Stage#2] Change-point Detection
    yRing.add(y).toArray(ySeries, false /* LIFO */);
    int k2 = yRing.size() - 1;
    double y_hat = sdar2.update(ySeries, k2);
    double lossY = (k2 == 0) ? 0.d : loss(y, y_hat, lossFunc1);
    double scoreY = ChangeFinderUDF.smoothing(changepointScores.add(lossY));
    outScores[0] = scoreX;
    outScores[1] = scoreY;
}
public RotationProcessModel() {
    super();
    // A = stateTransitionMatrix
    stateTransitionMatrix = new Array2DRowRealMatrix(new double[][] {
            { 1, 0, 0, 0 },
            { 0, 1, 0, 0 },
            { 0, 0, 1, 0 },
            { 0, 0, 0, 1 } });
    // B = controlMatrix
    controlMatrix = new Array2DRowRealMatrix(new double[][] {
            { 1, 0, 0, 0 },
            { 0, 1, 0, 0 },
            { 0, 0, 1, 0 },
            { 0, 0, 0, 1 } });
    // Q = processNoiseCovMatrix
    processNoiseCovMatrix = new Array2DRowRealMatrix(new double[][] {
            { 1, 0, 0, 0 },
            { 0, 1, 0, 0 },
            { 0, 0, 1, 0 },
            { 0, 0, 0, 1 } });
    // x0 = initialStateEstimateVector
    initialStateEstimateVector = new ArrayRealVector(new double[] { 0, 0, 0, 0 });
    // P0 = initialErrorCovMatrix
    initialErrorCovMatrix = new Array2DRowRealMatrix(new double[][] {
            { 0.1, 0, 0, 0 },
            { 0, 0.1, 0, 0 },
            { 0, 0, 0.1, 0 },
            { 0, 0, 0, 0.1 } });
}
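/*
 * Usage sketch with Commons Math's KalmanFilter, assuming RotationProcessModel
 * implements org.apache.commons.math3.filter.ProcessModel. The measurement
 * model below (identity H, small R) and the observation vector are
 * illustrative only.
 */
RealMatrix h = MatrixUtils.createRealIdentityMatrix(4);
RealMatrix r = MatrixUtils.createRealIdentityMatrix(4).scalarMultiply(0.01);
MeasurementModel measurementModel = new DefaultMeasurementModel(h, r);
KalmanFilter filter = new KalmanFilter(new RotationProcessModel(), measurementModel);
filter.predict();
filter.correct(new double[] { 0.0, 0.1, 0.0, 1.0 }); // e.g. a quaternion observation
double[] stateEstimate = filter.getStateEstimation();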
@Override
public SlopeCoefficients estimateCoefficients(final DerivationEquation eq)
        throws EstimationException {
    final double[][] sourceTriangleMatrix = eq.getCovarianceLowerTriangularMatrix();
    // Copy the matrix and mirror it into a full symmetric matrix, as expected by CholeskyDecomposition
    // FIXME: Avoid copy job to speed-up the solving process e.g. by extending the CholeskyDecomposition constructor
    final int length = sourceTriangleMatrix.length;
    final double[][] matrix = new double[length][];
    for (int i = 0; i < length; i++) {
        matrix[i] = new double[length];
        final double[] s = sourceTriangleMatrix[i];
        final double[] t = matrix[i];
        for (int j = 0; j <= i; j++) {
            t[j] = s[j];
        }
        for (int j = i + 1; j < length; j++) {
            t[j] = sourceTriangleMatrix[j][i];
        }
    }
    final RealMatrix coefficients = new Array2DRowRealMatrix(matrix, false);
    try {
        final DecompositionSolver solver = new CholeskyDecomposition(coefficients).getSolver();
        final RealVector constants = new ArrayRealVector(eq.getConstraints(), true);
        final RealVector solution = solver.solve(constants);
        return new DefaultSlopeCoefficients(solution.toArray());
    } catch (final NonPositiveDefiniteMatrixException e) {
        throw new EstimationException("Matrix inversion error: the data is linearly dependent", e);
    }
}
public static void main(String[] cmd_args) throws IOException {
    Double[][] data = readDoubleMatrix("src/nrmix/spike6x2000.data");
    RealVector[] Data = new RealVector[data.length];
    for (int i = 0; i < data.length; i++) {
        Data[i] = new ArrayRealVector(data[i]);
    }
    RealVector[] Pred = new RealVector[0];
    int numBurnin = 1000;
    int numSample = 5000;
    int numThinning = 10;
    int numNewClusters = 1;
    int numPrint = 10;
    double alphaShape = 1;
    double alphaInvScale = 1;
    double sigmaAlpha = 1;
    double sigmaBeta = 2;
    double tauShape = 1e9;
    double tauInvScale = 1e9;
    int numdim = Data[0].getDimension();
    double meanRelScale = 1.0;
    double precisionDegFreedom = numdim + 3.0;
    double invScaleDegFreedom = numdim - 0.6;
    double precisionScale = 50.0;
    /*
    nrmixmv.run("neal8", Data, Pred, "output.nrmix.spikes",
            numBurnin, numSample, numThinning, numNewClusters, numPrint,
            alphaShape, alphaInvScale, sigmaAlpha, sigmaBeta,
            tauShape, tauInvScale, true,
            meanRelScale, precisionDegFreedom, invScaleDegFreedom, precisionScale);
    */
    //nrmix.nrmix.run(true, false, "slice", data, "testlogacidslice");
}
/**
 * Independent Gaussian, Inverse-Gamma non-conjugate prior for the mean and precision
 * parameters of a Gaussian exponential family.
 *
 * @param prior a MVNormalWishartIndependent prior.
 */
public MVNormalNonConjugateHierarchy(MVNormalWishartIndependent prior) {
    this.prior = prior;
    sumX = new ArrayRealVector(prior.getNumDimension());
    sumXX = new Array2DRowRealMatrix(prior.getNumDimension(), prior.getNumDimension());
    number = 0;
    constant = prior.getNumDimension() * neghalflog2pi;
}
public static RealVector double2RealVector(double[] x) {
    // ArrayRealVector's array constructor copies the input,
    // which is exactly what the element-by-element loop did
    return new ArrayRealVector(x);
}
@Override
public VectorResponse getVector(VectorRequest request) {
    Map<String, Map<Integer, Double>> terms = new HashMap<>();
    request.getTerms().forEach(t ->
            terms.put(t, RealVectorUtil.vectorToMap(new ArrayRealVector(rvg.nextVector()))));
    return new SparseVectorResponse(request, terms);
}
public MockCachedVectorSpace(VectorComposer termComposer, VectorComposer translationComposer) {
    super(termComposer, translationComposer);
    vectors.put("throne", new ArrayRealVector(new double[] { 5, 6, 7, 8, 9 }));

    vectors.put("love", new ArrayRealVector(new double[] { 1, 0, 0, 0, 0 }));
    vectors.put("plane", new ArrayRealVector(new double[] { 0, 1, 0, 0, 0 }));
    vectors.put("good", new ArrayRealVector(new double[] { 0, 0, 1, 0, 0 }));
    vectors.put("hot", new ArrayRealVector(new double[] { 0, 0, 0, 1, 0 }));
    vectors.put("south", new ArrayRealVector(new double[] { 0, 0, 0, 0, 1 }));

    vectors.put("hate", new ArrayRealVector(new double[] { -1, 0, 0, 0, 0 }));
    vectors.put("car", new ArrayRealVector(new double[] { 0, -1, 0, 0, 0 }));
    vectors.put("bad", new ArrayRealVector(new double[] { 0, 0, -1, 0, 0 }));
    vectors.put("cold", new ArrayRealVector(new double[] { 0, 0, 0, -1, 0 }));
    vectors.put("north", new ArrayRealVector(new double[] { 0, 0, 0, 0, -1 }));

    vectors.put("mother", new ArrayRealVector(new double[] { 3, 3, 0, 0, 0 }));
    vectors.put("mom", new ArrayRealVector(new double[] { 0, 0, 3, 0, 0 }));
    vectors.put("matriarch", new ArrayRealVector(new double[] { 0, 0, 0, 3, 3 }));

    vectors.put("father", new ArrayRealVector(new double[] { -3, -3, 0, 0, 0 }));
    vectors.put("dad", new ArrayRealVector(new double[] { 0, 0, -3, 0, 0 }));
    vectors.put("patriarch", new ArrayRealVector(new double[] { 0, 0, 0, -3, -3 }));

    vectors.put("machine", new ArrayRealVector(new double[] { 2, 2, 0, 0, 0 }));
    vectors.put("computer", new ArrayRealVector(new double[] { 0, 0, 2, 2, 2 }));
    vectors.put("test", new ArrayRealVector(new double[] { -2, -2, 0, 0, 0 }));
    vectors.put("evaluation", new ArrayRealVector(new double[] { 0, 0, -2, -2, -2 }));

    // stemmed forms alias the full words
    vectors.put("machin", vectors.get("machine"));
    vectors.put("comput", vectors.get("computer"));
    vectors.put("evalu", vectors.get("evaluation"));
}
/**
 * Calculates the total error for the estimated fit as
 * the sum of the squared Euclidean distances between the
 * transformed point set X and the reference set Y.
 * This method is provided for testing as an alternative to
 * the quicker {@link #getError} method.
 * @param X Sequence of n-dimensional points.
 * @param Y Sequence of n-dimensional points (reference).
 * @return The total error for the estimated fit.
 */
public double getEuclideanError(List<double[]> X, List<double[]> Y) {
    RealMatrix cR = R.scalarMultiply(c);
    double ee = 0;
    for (int i = 0; i < X.size(); i++) {
        RealVector ai = new ArrayRealVector(X.get(i));
        RealVector bi = new ArrayRealVector(Y.get(i));
        RealVector aiT = cR.operate(ai).add(t);
        double ei = aiT.subtract(bi).getNorm();
        ee = ee + sqr(ei);
    }
    return ee;
}
@Override
public void fit(List<double[]> X, List<double[]> Y) { // fits n-dimensional data sets with affine model
    if (X.size() != Y.size())
        throw new IllegalArgumentException("point sequences X, Y must have same length");
    this.m = X.size();
    this.n = X.get(0).length;
    RealMatrix M = MatrixUtils.createRealMatrix(2 * m, 2 * (n + 1));
    RealVector b = new ArrayRealVector(2 * m);
    // mount matrix M:
    int row = 0;
    for (double[] x : X) {
        for (int j = 0; j < n; j++) {
            M.setEntry(row, j, x[j]);
            M.setEntry(row, n, 1);
        }
        row++; // advance once per point, not once per coordinate
        for (int j = 0; j < n; j++) {
            M.setEntry(row, j + n + 1, x[j]);
            M.setEntry(row, 2 * n + 1, 1);
        }
        row++;
    }
    // mount vector b
    row = 0;
    for (double[] y : Y) {
        for (int j = 0; j < n; j++) {
            b.setEntry(row, y[j]);
            row++;
        }
    }
    SingularValueDecomposition svd = new SingularValueDecomposition(M);
    DecompositionSolver solver = svd.getSolver();
    RealVector a = solver.solve(b);
    A = makeTransformationMatrix(a);
}
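/*
 * Usage sketch for the affine fit (illustrative 2D correspondences; the pair
 * below is exactly affine, x' = 2x + 1 and y' = y + 1, so the least-squares
 * solution recovers it with zero residual). Assumes the call happens on an
 * instance of this fitter class.
 */
List<double[]> X = Arrays.asList(
        new double[] { 0, 0 }, new double[] { 1, 0 },
        new double[] { 0, 1 }, new double[] { 1, 1 });
List<double[]> Y = Arrays.asList(
        new double[] { 1, 1 }, new double[] { 3, 1 },
        new double[] { 1, 2 }, new double[] { 3, 2 });
fit(X, Y); // least-squares estimate via SVD; the result is stored in A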
public float[] solveFToF(float[] b) {
    RealVector bVec = new ArrayRealVector(b.length);
    for (int i = 0; i < b.length; i++) {
        bVec.setEntry(i, b[i]);
    }
    RealVector resultVec = solver.solve(bVec);
    float[] result = new float[resultVec.getDimension()];
    for (int i = 0; i < result.length; i++) {
        result[i] = (float) resultVec.getEntry(i);
    }
    return result;
}
/**
 * Creates the Y vector for this regression model
 * @return the Y vector for the regression model
 */
RealVector createY() {
    final int rowCount = frame.rows().count();
    final int colIndex = frame.cols().ordinalOf(regressand);
    final RealVector y = new ArrayRealVector(rowCount);
    for (int i = 0; i < rowCount; ++i) {
        y.setEntry(i, frame.data().getDouble(i, colIndex));
    }
    return y;
}
/**
 * Computes the Total Sum of Squares for the regressand
 * @param y the vector with dependent variable observations
 * @return the Total Sum of Squares for the regressand
 */
protected double computeTSS(RealVector y) {
    if (!hasIntercept()) {
        // without an intercept, TSS is taken around zero rather than the mean
        return y.dotProduct(y);
    } else {
        final double[] values = y.toArray();
        final double mean = DoubleStream.of(values).average().orElse(Double.NaN);
        final double[] demeaned = DoubleStream.of(values).map(v -> v - mean).toArray();
        final RealVector demeanedVector = new ArrayRealVector(demeaned);
        return demeanedVector.dotProduct(demeanedVector);
    }
}
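/*
 * Sketch of how TSS typically feeds the coefficient of determination.
 * computeRSS is a hypothetical counterpart returning the residual sum of
 * squares; it is not part of the code above.
 */
double tss = computeTSS(y);
double rss = computeRSS(y, fitted); // assumed helper: sum of squared residuals
double rSquared = 1.0 - rss / tss;  // R^2 = 1 - RSS / TSS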
private static RealVector createRealVector(Dataset a) {
    if (a.getRank() != 1) {
        throw new IllegalArgumentException("Dataset must be rank 1");
    }
    int size = a.getSize();
    IndexIterator it = a.getIterator(true);
    int[] pos = it.getPos();
    RealVector m = new ArrayRealVector(size);
    while (it.hasNext()) {
        m.setEntry(pos[0], a.getElementDoubleAbs(it.index));
    }
    return m;
}
private static Dataset createDataset(RealVector v) {
    DoubleDataset r = DatasetFactory.zeros(DoubleDataset.class, v.getDimension());
    int size = r.getSize();
    if (v instanceof ArrayRealVector) {
        // fast path: copy straight out of the backing array
        double[] data = ((ArrayRealVector) v).getDataRef();
        for (int i = 0; i < size; i++) {
            r.setAbs(i, data[i]);
        }
    } else {
        for (int i = 0; i < size; i++) {
            r.setAbs(i, v.getEntry(i));
        }
    }
    return r;
}
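/*
 * Round-trip sketch for the two converters above (the dataset values are
 * illustrative; DatasetFactory.createFromObject is assumed available in the
 * same Dataset library).
 */
Dataset a = DatasetFactory.createFromObject(new double[] { 1.0, 2.0, 3.0 });
RealVector v = createRealVector(a);            // Dataset -> Commons Math vector
Dataset b = createDataset(v.mapMultiply(2.0)); // back to a Dataset after scaling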
/**
 * @return a comparator for sorting the optima.
 */
private Comparator<PointVectorValuePair> getPairComparator() {
    return new Comparator<PointVectorValuePair>() {
        /** Observed value to be matched. */
        private final RealVector target = new ArrayRealVector(optimizer.getTarget(), false);
        /** Observations weights. */
        private final RealMatrix weight = optimizer.getWeight();
        /** {@inheritDoc} */
        public int compare(final PointVectorValuePair o1, final PointVectorValuePair o2) {
            if (o1 == null) {
                return (o2 == null) ? 0 : 1;
            } else if (o2 == null) {
                return -1;
            }
            return Double.compare(weightedResidual(o1), weightedResidual(o2));
        }
        private double weightedResidual(final PointVectorValuePair pv) {
            final RealVector v = new ArrayRealVector(pv.getValueRef(), false);
            final RealVector r = target.subtract(v);
            return r.dotProduct(weight.operate(r));
        }
    };
}
private ArrayRealVector createNormalizedXVector(Set<Double> xValues, double min, double max) {
    Double[] dummy1 = new Double[1]; // only used to tell toArray() which array type to produce
    ArrayRealVector xVector = new ArrayRealVector(xValues.toArray(dummy1));
    xVector.mapSubtractToSelf(min);
    xVector.mapDivideToSelf(max - min);
    // translating [0,1] => [0.1,0.9]
    xVector.mapMultiplyToSelf(SQUEEZE);
    xVector.mapAddToSelf(OFFSET);
    return xVector;
}
private ArrayRealVector createYVector(Collection<Double> yValues) {
    // ArrayRealVector cannot be built directly from a boxed collection,
    // so the values are copied manually
    double[] doubleArray = new double[yValues.size()];
    int i = 0;
    for (Iterator<Double> it = yValues.iterator(); it.hasNext(); ++i) {
        doubleArray[i] = it.next();
    }
    return new ArrayRealVector(doubleArray);
}
@Override
public ArrayRealVector getPredictedEnergy(TariffSubscription subscription,
        int recordLength, int currentTimeslot) throws Exception {
    CapacityProfile predictedEnergyProfile =
            getForecastPerSubStartingAt(currentTimeslot, currentTimeslot, subscription);
    // elasticity
    //if (ConfigServerBroker.useElasticity()) {
    predictedEnergyProfile = adjustCapacityProfileForTariffRates(
            predictedEnergyProfile, currentTimeslot, subscription);
    //}
    return convertEnergyProfileFromServerToBroker(predictedEnergyProfile, recordLength);
}
private void fillCustomersWithEstimatedEnergy(
        HashMap<CustomerInfo, ArrayRealVector> customer2estimatedEnergy, int currentTimeslot) {
    List<FactoredCustomer> customers = factoredCustomerService.getCustomers();
    for (FactoredCustomer customer : customers) {
        // for each consumption bundle: map it to the broker's CustomerInfo and
        // scale the per-member energy record to the bundle's whole population
        for (CapacityBundle bundle : customer.getCapacityBundlesOfTypeThatCanUse(PowerType.CONSUMPTION)) {
            CustomerInfo custInfo = brokerCustInfos.get(bundle.getCustomerInfo().getName());
            RealVector populationEstimatedEnergy = customer2estimatedEnergy.get(custInfo)
                    .mapMultiply(bundle.getCustomerInfo().getPopulation());
            customer.updateEnergyRecord(bundle, populationEstimatedEnergy, currentTimeslot);
        }
    }
}
@Override
public HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>> updateEstimatedEnergyWithShifting(
        HashMap<CustomerInfo, ArrayRealVector> customer2estimatedEnergy,
        HashMap<TariffSpecification, HashMap<CustomerInfo, Double>> predictedCustomerSubscriptions,
        int currentTimeslot) {
    HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>> result =
            new HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>>();
    // add the same customer => energy mapping for all possible tariff specs
    // to build a customer => [spec] => energy structure
    for (Entry<TariffSpecification, HashMap<CustomerInfo, Double>> entry
            : predictedCustomerSubscriptions.entrySet()) {
        TariffSpecification spec = entry.getKey();
        for (CustomerInfo cust : entry.getValue().keySet()) {
            ArrayRealVector energy = customer2estimatedEnergy.get(cust);
            // get, or create if it doesn't exist
            HashMap<TariffSpecification, ShiftedEnergyData> spec2energy = result.get(cust);
            if (null == spec2energy) {
                spec2energy = new HashMap<TariffSpecification, ShiftedEnergyData>();
                result.put(cust, spec2energy);
            }
            spec2energy.put(spec, new ShiftedEnergyData(energy, 0.0)); // no inconvenience
        }
    }
    return result;
}
/**
 * Finds the radii of the ellipsoid in ascending order.
 * @param evals the eigenvalues of the ellipsoid.
 * @return the radii of the ellipsoid.
 */
private RealVector findRadii(double[] evals) {
    RealVector radii = new ArrayRealVector(evals.length);
    // radii[i] = sqrt(1 / evals[i])
    for (int i = 0; i < evals.length; i++) {
        radii.setEntry(i, Math.sqrt(1 / evals[i]));
    }
    return radii;
}
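/*
 * Sketch of where the eigenvalues come from (hedged): `algebraic` stands for
 * the ellipsoid's quadratic-form matrix, assumed to be built elsewhere in
 * this fitter. EigenDecomposition is the Commons Math class.
 */
EigenDecomposition ed = new EigenDecomposition(algebraic);
double[] evals = ed.getRealEigenvalues();
RealVector radii = findRadii(evals); // an eigenvalue 1/r^2 yields radius r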
@Override
abstract public TreeMap<Double, TariffSpecification> optimizeTariffs(
        HashMap<TariffSpecification, HashMap<CustomerInfo, Integer>> tariffSubscriptions,
        HashMap<CustomerInfo, ArrayRealVector> customer2estimatedEnergy,
        List<TariffSpecification> competingTariffs, MarketManager marketManager,
        ContextManager contextManager, CostCurvesPredictor costCurvesPredictor,
        int currentTimeslot, Broker me);
protected HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>> estimateShiftedPredictions(
        HashMap<CustomerInfo, ArrayRealVector> customer2estimatedEnergy,
        List<TariffSpecification> specs, int currentTimeslot) {
    HashMap<TariffSpecification, HashMap<CustomerInfo, Double>> dummySubscriptions =
            new HashMap<TariffSpecification, HashMap<CustomerInfo, Double>>();
    for (TariffSpecification spec : specs) {
        dummySubscriptions.put(spec, new HashMap<CustomerInfo, Double>());
        Set<CustomerInfo> customers = customer2estimatedEnergy.keySet();
        for (CustomerInfo customer : customers) {
            if (customer.getPowerType().canUse(spec.getPowerType())) {
                dummySubscriptions.get(spec).put(customer, 1.);
            }
        }
    }
    // predict shifting effects, or no shifting, depending on shiftingPredictor
    return shiftingPredictor.updateEstimatedEnergyWithShifting(
            customer2estimatedEnergy, dummySubscriptions, currentTimeslot);
}
@Override
public ArrayRealVector getMarketAvgPricesArrayKwh() {
    // convert MWh to kWh; the small epsilon avoids division by zero
    RealVector marketKWh_ = new ArrayRealVector(marketMWh)
            .mapMultiplyToSelf(1000.0).mapAddToSelf(1e-9);
    ArrayRealVector marketPayments_ = new ArrayRealVector(marketPayments);
    // element-by-element division yields the average price per timeslot
    return marketPayments_.ebeDivide(marketKWh_);
}