private NoveltyScores score(final RealMatrix kernelMatrix) {
    // projected test samples:
    final RealMatrix projectionVectors = kernelMatrix.transpose().multiply(m_projection);

    // differences to the target value:
    final RealMatrix diff = projectionVectors.subtract(
            MatrixFunctions.ones(kernelMatrix.getColumnDimension(), 1)
                    .scalarMultiply(m_targetPoints.getEntry(0, 0)));

    // distances to the target value:
    final RealVector scoresVector = MatrixFunctions.sqrt(
            MatrixFunctions.rowSums(MatrixFunctions.multiplyElementWise(diff, diff)));

    return new NoveltyScores(scoresVector.toArray(), projectionVectors);
}
private static RealMatrix centerKernelMatrix(final RealMatrix kernelMatrix) {
    // get size of kernelMatrix
    final int n = kernelMatrix.getRowDimension();

    // get mean values for each row/column
    final RealVector columnMeans = MatrixFunctions.columnMeans(kernelMatrix);
    final double matrixMean = MatrixFunctions.mean(kernelMatrix);

    RealMatrix centeredKernelMatrix = kernelMatrix.copy();
    for (int k = 0; k < n; k++) {
        centeredKernelMatrix.setRowVector(k,
                centeredKernelMatrix.getRowVector(k).subtract(columnMeans));
        centeredKernelMatrix.setColumnVector(k,
                centeredKernelMatrix.getColumnVector(k).subtract(columnMeans));
    }
    centeredKernelMatrix = centeredKernelMatrix.scalarAdd(matrixMean);
    return centeredKernelMatrix;
}
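// For a symmetric kernel matrix, the loop above is equivalent to the textbook double-
// centering formula Kc = K - 1n*K - K*1n + 1n*K*1n, where 1n is the n-by-n matrix with
// every entry 1/n. A minimal standalone sketch of that equivalence (illustrative only,
// not part of the original code base; uses org.apache.commons.math3.linear.MatrixUtils):
static void kernelCenteringEquivalenceSketch() {
    RealMatrix k = MatrixUtils.createRealMatrix(new double[][] {
            {4, 2, 1},
            {2, 3, 0},
            {1, 0, 5}});
    int n = k.getRowDimension();
    // 1n: every entry equals 1/n
    RealMatrix ones = MatrixUtils.createRealMatrix(n, n).scalarAdd(1.0 / n);
    RealMatrix centered = k.subtract(ones.multiply(k))
            .subtract(k.multiply(ones))
            .add(ones.multiply(k).multiply(ones));
    System.out.println(centered); // every row and column now sums to ~0
}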
private RealVector unmarshall(Document doc, int limit) {
    if (!metadata.isBinary()) {
        throw new UnsupportedOperationException("Can't consume non-binary models.");
    }
    try {
        final Binary binary = doc.get(VECTOR_FIELD_NAME, Binary.class);
        final byte[] b = binary.getData();
        if (metadata.getLoaderId().equalsIgnoreCase("legacy")) {
            return BinaryCodecs.legacyUnmarshall(b, limit, metadata.isSparse(),
                    metadata.getDimensions());
        } else {
            return BinaryCodecs.unmarshall(b, metadata.isSparse(),
                    metadata.getDimensions());
        }
    } catch (Exception e) {
        logger.error("Error unmarshalling vector", e);
    }
    return null;
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double min = 0.0;
    double sum = 0.0;
    for (int i = 0; i < r1.getDimension(); ++i) {
        if (r1.getEntry(i) > r2.getEntry(i)) {
            min += r2.getEntry(i);
        } else {
            min += r1.getEntry(i);
        }
        sum += r1.getEntry(i) + r2.getEntry(i);
    }
    if (sum == 0) {
        return 0;
    }
    double result = 2 * min / sum;
    return Math.abs(result);
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double max = 0;
    double tmp;
    for (int i = 0; i < r1.getDimension(); ++i) {
        tmp = Math.abs(r1.getEntry(i) - r2.getEntry(i));
        max = (tmp > max ? tmp : max);
    }
    // guard against NaN via Double.isNaN; NaN never compares equal to itself
    double result = 1 / (1 + (Double.isNaN(max) ? 0 : max));
    return Math.abs(result);
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double alpha = 0.99;
    double divergence = 0.0;
    for (int i = 0; i < r1.getDimension(); ++i) {
        if (r1.getEntry(i) > 0.0 && r2.getEntry(i) > 0.0) {
            divergence += r1.getEntry(i)
                    * Math.log(r1.getEntry(i)
                            / ((1 - alpha) * r1.getEntry(i) + alpha * r2.getEntry(i)));
        }
    }
    double result = 1 - (divergence / Math.sqrt(2 * Math.log(2)));
    return Math.abs(result);
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double min = 0.0;
    double max = 0.0;
    for (int i = 0; i < r1.getDimension(); ++i) {
        if (r1.getEntry(i) > r2.getEntry(i)) {
            min += r2.getEntry(i);
            max += r1.getEntry(i);
        } else {
            min += r1.getEntry(i);
            max += r2.getEntry(i);
        }
    }
    if (max == 0) {
        return 0;
    }
    return Math.abs(min / max);
}
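// Quick usage sketch for the min/max (generalized Jaccard) similarity above; the
// values and the concrete class name MinMaxSimilarity are illustrative assumptions.
static void minMaxSimilarityExample() {
    RealVector a = new ArrayRealVector(new double[] {1, 0, 2});
    RealVector b = new ArrayRealVector(new double[] {1, 1, 1});
    double s = new MinMaxSimilarity().sim(a, b, false);
    // min-sum = 1 + 0 + 1 = 2, max-sum = 1 + 1 + 2 = 4, so s == 0.5
}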
/**
 * Vector deserialization.
 */
public static RealVector unmarshall(byte[] bytes, boolean sparse, int dimensions)
        throws IOException {
    RealVector realVector = !sparse
            ? new ArrayRealVector(dimensions)
            : new OpenMapRealVector(dimensions);
    if (!sparse) {
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            for (int i = 0; i < dimensions; i++) {
                realVector.setEntry(i, dis.readDouble());
            }
        }
    } else {
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            while (true) {
                try {
                    realVector.setEntry(dis.readInt(), dis.readDouble());
                } catch (EOFException e) {
                    break;
                }
            }
        }
    }
    return realVector;
}
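/**
 * Hedged sketch of the matching serializer, which is not shown in this section. It
 * writes exactly the layout unmarshall() reads back: dense vectors as a plain run of
 * doubles, sparse vectors as (int index, double value) pairs for the non-zero entries.
 * The method name and the zero-skipping policy are assumptions.
 */
public static byte[] marshall(RealVector vector, boolean sparse) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos)) {
        if (!sparse) {
            for (int i = 0; i < vector.getDimension(); i++) {
                dos.writeDouble(vector.getEntry(i));
            }
        } else {
            for (int i = 0; i < vector.getDimension(); i++) {
                double v = vector.getEntry(i);
                if (v != 0d) {
                    dos.writeInt(i);
                    dos.writeDouble(v);
                }
            }
        }
    }
    return baos.toByteArray();
}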
public Map<String, Double> getRelatedness(String one, List<String> many, boolean translated) {
    List<? extends AnalyzedTerm> analyzedTerms = doAnalyze(one, many);

    Map<String, RealVector> vectors;
    if (translated) {
        vectors = vectorSpace.getTranslatedVectors((List<MutableTranslatedTerm>) analyzedTerms);
    } else {
        vectors = vectorSpace.getVectors((List<AnalyzedTerm>) analyzedTerms);
    }

    Map<String, Double> results = new LinkedHashMap<>();
    RealVector oneVector = vectors.get(one);
    for (String m : many) {
        RealVector mVector = vectors.get(m);
        if (oneVector != null && mVector != null) {
            double score = func.sim(oneVector, mVector, vectorSpace.getMetadata().isSparse());
            results.put(m, score);
        } else {
            results.put(m, 0d);
        }
    }
    return results;
}
@Override
public Map<String, RealVector> getVectors(List<AnalyzedTerm> terms) {
    if (terms == null) {
        throw new IllegalArgumentException("terms can't be null");
    }
    Set<String> allTerms = new HashSet<>();
    terms.forEach(t -> allTerms.addAll(t.getAnalyzedTokens()));
    collectVectors(allTerms, getMetadata().getDimensions());

    Map<String, RealVector> vectors = new HashMap<>();
    for (AnalyzedTerm term : terms) {
        RealVector vector = composeVectors(term.getAnalyzedTokens(), getTermComposer());
        vectors.put(term.getTerm(), vector);
    }
    return vectors;
}
@Override
public RealVector compose(List<RealVector> vectors) {
    logger.trace("Composing {} vectors", vectors.size());
    if (vectors.isEmpty()) {
        return null;
    } else if (vectors.size() == 1) {
        return vectors.get(0);
    } else {
        RealVector sum = vectors.get(0).add(vectors.get(1));
        for (int i = 2; i < vectors.size(); i++) {
            sum = sum.add(vectors.get(i));
        }
        return sum;
    }
}
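// Usage sketch for the additive composer above; the concrete class name
// SumVectorComposer is a guess, and the values are illustrative.
static void sumComposerExample() {
    List<RealVector> parts = Arrays.asList(
            new ArrayRealVector(new double[] {1, 2, 3}),
            new ArrayRealVector(new double[] {4, 5, 6}));
    RealVector composed = new SumVectorComposer().compose(parts); // (5, 7, 9)
}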
@Test
public void getComposedTranslatedVectorsTest() {
    IndraAnalyzer ptAnalyzer = new IndraAnalyzer("PT",
            ModelMetadata.createTranslationVersion(vectorSpace.getMetadata()));
    List<String> terms = Arrays.asList("mãe computador", "pai avaliação");

    List<MutableTranslatedTerm> analyzedTerms = terms.stream()
            .map(t -> new MutableTranslatedTerm(t, ptAnalyzer.analyze(t)))
            .collect(Collectors.toList());

    analyzedTerms.get(0).putAnalyzedTranslatedTokens("mãe", Arrays.asList("mother", "mom", "matriarch"));
    analyzedTerms.get(0).putAnalyzedTranslatedTokens("computador", Arrays.asList("machine", "computer"));
    analyzedTerms.get(1).putAnalyzedTranslatedTokens("pai", Arrays.asList("father", "dad", "patriarch"));
    analyzedTerms.get(1).putAnalyzedTranslatedTokens("avaliação", Arrays.asList("test", "evaluation"));

    Map<String, RealVector> vectorPairs = vectorSpace.getTranslatedVectors(analyzedTerms);
    Assert.assertEquals(vectorPairs.size(), 2);
    Assert.assertEquals(vectorPairs.get(terms.get(0)), MockCachedVectorSpace.TWO_VECTOR);
    Assert.assertEquals(vectorPairs.get(terms.get(1)), MockCachedVectorSpace.NEGATIVE_TWO_VECTOR);
}
public MixModel(MethyModel tumor, MethyModel normal, RealVector thetas, int nBetas,
        int MYTHREADS) throws InterruptedException {
    int nFeatures = tumor.getNaRatio().getDimension();
    this.nBetas = nBetas;
    RealVector betas = new ArrayRealVector(nBetas);
    for (int i = 0; i < nBetas; i++) {
        betas.setEntry(i, i / (nBetas - 1.0));
    }
    mixDens = new RealMatrix[nFeatures];
    ExecutorService executor = Executors.newFixedThreadPool(MYTHREADS);
    for (int i = 0; i < nFeatures; i++) {
        double tumorAlpha = tumor.getAlpha().getEntry(i);
        double tumorBeta = tumor.getBeta().getEntry(i);
        BetaDistribution tumorDist = new BetaDistribution(tumorAlpha, tumorBeta);
        double normalAlpha = normal.getAlpha().getEntry(i);
        double normalBeta = normal.getBeta().getEntry(i);
        BetaDistribution normalDist = new BetaDistribution(normalAlpha, normalBeta);
        Runnable worker = new CalMixDens(tumorDist, normalDist, thetas, betas, nPoints, i, mixDens);
        executor.execute(worker);
    }
    executor.shutdown();
    // poll until every CalMixDens worker has finished
    while (!executor.isTerminated()) {
        Thread.sleep(10000);
    }
}
private RealMatrix makeDataMatrix(List<double[]> X, double[] meanX) {
    if (meanX == null) {
        return makeDataMatrix(X);
    }
    final int m = X.size();
    final int n = X.get(0).length;
    RealMatrix M = MatrixUtils.createRealMatrix(n, m);
    RealVector mean = MatrixUtils.createRealVector(meanX);
    int i = 0;
    for (double[] x : X) {
        RealVector xi = MatrixUtils.createRealVector(x).subtract(mean);
        M.setColumnVector(i, xi);
        i++;
    }
    return M;
}
/**
 * Runs the regression model for the given dependent and independent variables.
 * The Y and X variables must be transformed, if necessary, to meet Gauss-Markov assumptions.
 * @param y the dependent variable, which may be a transformed version of the raw data
 * @param x the independent variable(s), which may be a transformed version of the raw data
 */
protected void compute(RealVector y, RealMatrix x) {
    final int n = frame.rows().count();
    final int p = regressors.size() + (hasIntercept() ? 1 : 0);
    final int dfModel = regressors.size();
    final RealMatrix betaMatrix = computeBeta(y, x);
    final RealVector betaCoefficients = betaMatrix.getColumnVector(0);
    final RealVector betaVariance = betaMatrix.getColumnVector(1);
    this.tss = computeTSS(y);
    this.ess = tss - rss;
    this.fValue = (ess / dfModel) / (rss / (n - p));
    this.fValueProbability = 1d - new FDistribution(dfModel, n - p).cumulativeProbability(fValue);
    this.rSquared = 1d - (rss / tss);
    this.rSquaredAdj = 1d - (rss * (n - (hasIntercept() ? 1 : 0))) / (tss * (n - p));
    this.computeParameterStdErrors(betaVariance);
    this.computeParameterSignificance(betaCoefficients);
}
/**
 * Calculates the standard errors of the regression parameters.
 * @param betaVar the variance of the beta parameters
 * @throws DataFrameException if this operation fails
 */
private void computeParameterStdErrors(RealVector betaVar) {
    try {
        final int offset = hasIntercept() ? 1 : 0;
        if (hasIntercept()) {
            final double interceptVariance = betaVar.getEntry(0);
            final double interceptStdError = Math.sqrt(interceptVariance);
            this.intercept.data().setDouble(0, Field.STD_ERROR, interceptStdError);
        }
        for (int i = 0; i < regressors.size(); i++) {
            final double betaVar_i = betaVar.getEntry(i + offset);
            final double betaStdError = Math.sqrt(betaVar_i);
            this.betas.data().setDouble(i, Field.STD_ERROR, betaStdError);
        }
    } catch (Exception ex) {
        throw new DataFrameException("Failed to calculate regression coefficient standard errors", ex);
    }
}
/**
 * Predict the internal state estimation one time step ahead.
 *
 * @param u
 *            the control vector
 * @throws DimensionMismatchException
 *             if the dimension of the control vector does not match
 */
public void predict(final RealVector u) throws DimensionMismatchException {
    // sanity checks
    if (u != null && u.getDimension() != controlMatrix.getColumnDimension()) {
        throw new DimensionMismatchException(u.getDimension(),
                controlMatrix.getColumnDimension());
    }

    // project the state estimation ahead (a priori state)
    // xHat(k)- = A * xHat(k-1) + B * u(k-1)
    // stateEstimation = transitionMatrix.operate(stateEstimation);

    // add control input if it is available
    // if (u != null) {
    //     stateEstimation = stateEstimation.add(controlMatrix.operate(u));
    // }

    // We don't need to use the transition matrix or control matrix, since
    // we have already done all the work... we can just set the state
    // estimation to u.
    if (u != null) {
        stateEstimation = u;
    }

    // project the error covariance ahead
    // P(k)- = A * P(k-1) * A' + Q
    errorCovariance = transitionMatrix.multiply(errorCovariance)
            .multiply(transitionMatrixT)
            .add(processModel.getProcessNoise());
}
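// Standalone sketch of the covariance propagation step above, P(k)- = A * P(k-1) * A' + Q,
// using only Commons Math; the matrices are illustrative, not taken from the filter.
static void covariancePredictExample() {
    RealMatrix A = MatrixUtils.createRealIdentityMatrix(2);
    RealMatrix P = MatrixUtils.createRealDiagonalMatrix(new double[] {1.0, 1.0});
    RealMatrix Q = MatrixUtils.createRealDiagonalMatrix(new double[] {0.01, 0.01});
    RealMatrix pPredicted = A.multiply(P).multiply(A.transpose()).add(Q); // diag(1.01, 1.01)
}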
@Override
public SlopeCoefficients estimateCoefficients(final DerivationEquation eq)
        throws EstimationException {
    final double[][] sourceTriangleMatrix = eq.getCovarianceLowerTriangularMatrix();
    // Copy the matrix and expand it to a full matrix, as expected by CholeskyDecomposition.
    // FIXME: Avoid the copy to speed up solving, e.g. by extending the
    // CholeskyDecomposition constructor.
    final int length = sourceTriangleMatrix.length;
    final double[][] matrix = new double[length][];
    for (int i = 0; i < length; i++) {
        matrix[i] = new double[length];
        final double[] s = sourceTriangleMatrix[i];
        final double[] t = matrix[i];
        for (int j = 0; j <= i; j++) {
            t[j] = s[j];
        }
        for (int j = i + 1; j < length; j++) {
            t[j] = sourceTriangleMatrix[j][i];
        }
    }
    final RealMatrix coefficients = new Array2DRowRealMatrix(matrix, false);
    try {
        final DecompositionSolver solver = new CholeskyDecomposition(coefficients).getSolver();
        final RealVector constants = new ArrayRealVector(eq.getConstraints(), true);
        final RealVector solution = solver.solve(constants);
        return new DefaultSlopeCoefficients(solution.toArray());
    } catch (final NonPositiveDefiniteMatrixException e) {
        throw new EstimationException("Matrix inversion error: data is linearly dependent", e);
    }
}
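// Minimal standalone sketch (illustrative values) of the Commons Math calls used above:
// solving A x = b for a symmetric positive-definite A via its Cholesky factorization.
static void choleskySolveExample() {
    RealMatrix a = new Array2DRowRealMatrix(new double[][] {{4, 2}, {2, 3}}, false);
    DecompositionSolver solver = new CholeskyDecomposition(a).getSolver();
    RealVector x = solver.solve(new ArrayRealVector(new double[] {1.0, 1.0}, true));
}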
@Test
public void testSqrtVector() {
    final RealVector result = MatrixFunctions.sqrt(vector);
    for (int i = 0; i < result.getDimension(); i++) {
        assertEquals(Math.sqrt(vector.getEntry(i)), result.getEntry(i), 0);
    }
}
private NoveltyScores score(final RealMatrix kernelMatrix) {
    final RealMatrix projectionVectors = kernelMatrix.transpose().multiply(m_projection);

    // squared euclidean distances to target points:
    final RealMatrix squared_distances =
            squared_euclidean_distances(projectionVectors, m_targetPoints);

    // novelty scores as minimum distance to one of the target points
    final RealVector scoreVector =
            MatrixFunctions.sqrt(MatrixFunctions.rowMins(squared_distances));

    return new NoveltyScores(scoreVector.toArray(), projectionVectors);
}
private RealMatrix squared_euclidean_distances(final RealMatrix x, final RealMatrix y) {
    final RealMatrix distmat =
            MatrixUtils.createRealMatrix(x.getRowDimension(), y.getRowDimension());
    for (int i = 0; i < x.getRowDimension(); i++) {
        for (int j = 0; j < y.getRowDimension(); j++) {
            final RealVector buff = x.getRowVector(i).subtract(y.getRowVector(j));
            distmat.setEntry(i, j, buff.dotProduct(buff));
        }
    }
    return distmat;
}
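// Equivalence sketch (illustrative, not from the original code): the inner loop's
// buff.dotProduct(buff) equals the square of RealVector's built-in euclidean distance.
static void squaredDistanceEquivalenceExample() {
    RealVector u = new ArrayRealVector(new double[] {0, 0});
    RealVector v = new ArrayRealVector(new double[] {3, 4});
    double squared = Math.pow(u.getDistance(v), 2); // 25.0, same as diff.dotProduct(diff)
}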
public static RealVector sqrt(final RealVector vector) {
    final RealVector result = vector.copy();
    for (int e = 0; e < result.getDimension(); e++) {
        result.setEntry(e, Math.sqrt(result.getEntry(e)));
    }
    return result;
}
public static RealVector simpleNewton(RealVector currentApprox) {
    RealVector nextApprox = simpleNewtonIteration(currentApprox);
    while (nextApprox.subtract(currentApprox).getLInfNorm() > 1E-15) {
        currentApprox = nextApprox;
        nextApprox = simpleNewtonIteration(currentApprox);
    }
    return nextApprox;
}
public static RealVector simpleNewtonIteration(RealVector currentApprox) {
    NewtonMethod method = new NewtonMethod();
    double[] temp = currentApprox.toArray();
    method.setJacobiMatrix(temp);
    method.setEquationSystem(temp);
    RealVector vector = method.solveOfEquation();
    return vector.add(currentApprox);
}
public static RealVector modifyFirstIterationMatrixNewton(RealVector currentApprox) {
    NewtonMethod method = new NewtonMethod();
    // the Jacobian is set once from the initial approximation and reused in every step
    method.setJacobiMatrix(currentApprox.toArray());
    method.setEquationSystem(currentApprox.toArray());
    RealVector nextApprox = method.solveOfEquation().add(currentApprox);
    while (nextApprox.subtract(currentApprox).getLInfNorm() > 1E-15) {
        currentApprox = nextApprox;
        method.setEquationSystem(currentApprox.toArray());
        nextApprox = method.solveOfEquation().add(currentApprox);
    }
    return nextApprox;
}
public static RealVector changeMethod(RealVector currentApprox, int k) {
    RealVector nextApprox = simpleNewtonIteration(currentApprox);
    while (--k > 0) {
        currentApprox = nextApprox;
        nextApprox = simpleNewtonIteration(currentApprox);
    }
    return modifyFirstIterationMatrixNewton(currentApprox);
}
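// Hypothetical driver for the Newton variants above (NewtonMethod and its system of
// equations are defined elsewhere; the initial guess is illustrative):
static void newtonDriverExample() {
    RealVector x0 = new ArrayRealVector(new double[] {1.0, 1.0});
    RealVector rootPlain = simpleNewton(x0);                      // full Newton
    RealVector rootFrozen = modifyFirstIterationMatrixNewton(x0); // frozen-Jacobian Newton
    RealVector rootHybrid = changeMethod(x0, 3);                  // 3 full steps, then frozen
}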
public static void main(String[] cmd_args) throws IOException {
    Double[][] data = readDoubleMatrix("src/nrmix/spike6x2000.data");
    RealVector[] Data = new RealVector[data.length];
    for (int i = 0; i < data.length; i++) {
        Data[i] = new ArrayRealVector(data[i]);
    }
    RealVector[] Pred = new RealVector[0];

    int numBurnin = 1000;
    int numSample = 5000;
    int numThinning = 10;
    int numNewClusters = 1;
    int numPrint = 10;
    double alphaShape = 1;
    double alphaInvScale = 1;
    double sigmaAlpha = 1;
    double sigmaBeta = 2;
    double tauShape = 1e9;
    double tauInvScale = 1e9;

    int numdim = Data[0].getDimension();
    double meanRelScale = 1.0;
    double precisionDegFreedom = numdim + 3.0;
    double invScaleDegFreedom = numdim - 0.6;
    double precisionScale = 50.0;

    /*
    nrmixmv.run("neal8", Data, Pred, "output.nrmix.spikes",
            numBurnin, numSample, numThinning, numNewClusters, numPrint,
            alphaShape, alphaInvScale, sigmaAlpha, sigmaBeta, tauShape, tauInvScale,
            true, meanRelScale, precisionDegFreedom, invScaleDegFreedom, precisionScale);
    */
    // nrmix.nrmix.run(true, false, "slice", data, "testlogacidslice");
}
@Override
public void sample() {
    if (number == 0) {
        param = prior.drawSample();
        return;
    }
    Cholesky newPrecision = new Cholesky(
            prior.getMeanPrecision().add(param.getPrecision().scalarMultiply(number)));
    RealVector newMean = newPrecision.getSolver().solve(
            prior.getMeanPrecision().operate(prior.getMeanMean()).add(
                    param.getPrecision().operate(sumX)));
    param.setMean(generator.nextMVNormalMeanPrecision(newMean, newPrecision));

    double newDegFreedom = prior.getPrecisionDegFreedom() + number;
    // System.out.println("hierarchysampled.sample: " + prior.precisionInvScale
    //         .add(sumXX)
    //         .add(param.mean.outerProduct(param.mean.mapMultiply(number)))
    //         .subtract(param.mean.outerProduct(sumX))
    //         .subtract(sumX.outerProduct(param.mean)));
    Cholesky newInvScale = new Cholesky(
            prior.getPrecisionInvScale()
                    .add(sumXX)
                    .add(param.getMean().outerProduct(param.getMean().mapMultiply(number)))
                    .subtract(param.getMean().outerProduct(sumX))
                    .subtract(sumX.outerProduct(param.getMean())));
    param.setPrecision(generator.nextWishart(newDegFreedom, newInvScale));
}
@Override
public void removeDatum(RealVector datum) {
    number -= 1;
    sumX = sumX.subtract(datum);
    sumXX = sumXX.subtract(datum.outerProduct(datum));
    assert number >= 0;
    // System.out.println("removedatum: " + sumXX);
    assert (new CholeskyDecomposition(sumXX.add(prior.getPrecisionInvScale())))
            .getDeterminant() > -1e-10;
}
/**
 * Hyperparameters of the MVNormal-Wishart-Independent prior.
 * @param meanMean
 * @param meanPrecision
 * @param precisionDegFreedom
 * @param precisionInvScaleDegFreedom
 * @param precisionInvScaleInvScale
 */
public MVNormalWishartIndependent(
        RealVector meanMean,
        RealMatrix meanPrecision,
        double precisionDegFreedom,
        double precisionInvScaleDegFreedom,
        RealMatrix precisionInvScaleInvScale) {
    this.meanMean = meanMean;
    this.meanPrecision = meanPrecision;
    this.precisionDegFreedom = precisionDegFreedom;
    this.precisionInvScaleDegFreedom = precisionInvScaleDegFreedom;
    this.precisionInvScaleInvScale = precisionInvScaleInvScale;
    meanPrecisionChol = new Cholesky(meanPrecision);
    precisionInvScaleInvScaleChol = new Cholesky(precisionInvScaleInvScale);
    precisionInvScale = precisionInvScaleInvScaleChol
            .getSolver()
            .getInverse()
            .scalarMultiply(precisionInvScaleDegFreedom);
    precisionInvScaleChol = new Cholesky(precisionInvScale);
    numdim = meanMean.getDimension();
    assert precisionDegFreedom > numdim - 1;
    assert precisionInvScaleDegFreedom > numdim - 1;
    assert meanPrecision.getRowDimension() == numdim;
    assert precisionInvScaleInvScale.getRowDimension() == numdim;
    constant = .5 * (precisionDegFreedom + 1.0) * numdim * log(2.0)
            + .25 * numdim * (numdim + 1) * log(PI)
            - .5 * meanPrecisionChol.getLogDeterminant();
    for (int i = 1; i <= numdim; i++) {
        constant += logGamma(.5 * (precisionDegFreedom + 1 - i));
    }
}
@Override
public double logProbability(MVNormal datum) {
    int d = meanMean.getDimension();
    RealVector diff = datum.getMean().subtract(meanMean);
    return -.5 * diff.dotProduct(meanPrecision.operate(diff))
            - .5 * traceDot(datum.getPrecision())
            + .5 * (precisionDegFreedom - d - 1) * datum.getPrecisionLogDeterminant()
            - logNormalizer();
}
static public RealVector double2RealVector(double[] x) {
    RealVector y = new ArrayRealVector(x.length);
    for (int i = 0; i < x.length; i++) {
        y.setEntry(i, x[i]);
    }
    return y;
}
static public RealVector[] double2RealVector(double[][] x) {
    RealVector[] y = new RealVector[x.length];
    for (int i = 0; i < x.length; i++) {
        y[i] = double2RealVector(x[i]);
    }
    return y;
}
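// Usage sketch for the two converters above (values illustrative). Note that
// new ArrayRealVector(x) would also copy a 1-d array in a single call.
static void conversionExample() {
    RealVector v = double2RealVector(new double[] {0.1, 0.2, 0.3});
    RealVector[] vs = double2RealVector(new double[][] {{1, 2}, {3, 4}});
}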
@Override
protected List<RealVector> getFromCache(Collection<String> terms) {
    List<RealVector> termVectors = new ArrayList<>();
    terms.stream()
            .filter(t -> this.vectorsCache.containsKey(t))
            .forEach(t -> termVectors.add(this.vectorsCache.get(t)));
    return termVectors;
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double divergence = 0.0;
    double avr = 0.0;
    for (int i = 0; i < r1.getDimension(); ++i) {
        avr = (r1.getEntry(i) + r2.getEntry(i)) / 2;
        if (r1.getEntry(i) > 0.0 && avr > 0.0) {
            divergence += r1.getEntry(i) * Math.log(r1.getEntry(i) / avr);
        }
    }
    for (int i = 0; i < r2.getDimension(); ++i) {
        avr = (r1.getEntry(i) + r2.getEntry(i)) / 2;
        if (r2.getEntry(i) > 0.0 && avr > 0.0) {
            // the second KL term of the Jensen-Shannon divergence is weighted by r2,
            // mirroring the r1-weighted term in the loop above
            divergence += r2.getEntry(i) * Math.log(r2.getEntry(i) / avr);
        }
    }
    double result = 1 - (divergence / (2 * Math.sqrt(2 * Math.log(2))));
    return Math.abs(result);
}
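// Sanity-check sketch (not from the original source): identical distributions have
// zero divergence, so the similarity should come out as exactly 1. The class name
// JensenShannonSimilarity is an assumption.
static void jensenShannonSanityCheck() {
    RealVector p = new ArrayRealVector(new double[] {0.5, 0.5});
    double s = new JensenShannonSimilarity().sim(p, p, false); // expected: 1.0
}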
@Override
protected double computeTSS(RealVector y) {
    if (!hasIntercept()) {
        return y.dotProduct(y);
    } else {
        final C regressand = getRegressand();
        final double sumOfWeights = weights.stats().sum().doubleValue();
        final Array<Double> yValues = Array.of(frame().col(regressand).toDoubleStream().toArray());
        final double weightedAvg = yValues
                .mapToDoubles(v -> v.getDouble() * weights.getDouble(v.index()))
                .stats().sum().doubleValue() / sumOfWeights;
        final Array<Double> diffSquared = yValues.mapToDoubles(v ->
                weights.getDouble(v.index()) * Math.pow(v.getDouble() - weightedAvg, 2d));
        return diffSquared.stats().sum().doubleValue();
    }
}
@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    if (r1.getDimension() != r2.getDimension()) {
        return 0;
    }
    double sum = 0.0;
    for (int i = 0; i < r1.getDimension(); ++i) {
        sum += Math.abs(r1.getEntry(i) - r2.getEntry(i));
    }
    // guard against NaN via Double.isNaN; NaN never compares equal to itself
    double result = 1 / (1 + (Double.isNaN(sum) ? 0 : sum));
    return Math.abs(result);
}
@Override
public RealVector compose(List<RealVector> vectors) {
    RealVector res = super.compose(vectors);
    if (vectors != null && vectors.size() > 1) {
        res.mapDivideToSelf(vectors.size());
    }
    return res;
}
public final Map<String, double[]> getVectorsAsArray(List<String> terms, VectorRequest request) {
    Map<String, RealVector> inVectors = getVectors(terms, request);

    Map<String, double[]> outVectors = new HashMap<>();
    for (Map.Entry<String, RealVector> entry : inVectors.entrySet()) {
        RealVector realVector = entry.getValue();
        outVectors.put(entry.getKey(), realVector != null ? realVector.toArray() : null);
    }
    return outVectors;
}
public final Map<String, Map<Integer, Double>> getVectorsAsMap(List<String> terms, VectorRequest request) {
    Map<String, RealVector> inVectors = getVectors(terms, request);

    Map<String, Map<Integer, Double>> outVectors = new HashMap<>();
    for (Map.Entry<String, RealVector> entry : inVectors.entrySet()) {
        RealVector realVector = entry.getValue();
        outVectors.put(entry.getKey(),
                realVector != null ? RealVectorUtil.vectorToMap(realVector) : null);
    }
    return outVectors;
}
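/**
 * Hedged sketch of what RealVectorUtil.vectorToMap, used above, plausibly does (the
 * real implementation is not shown in this section): map each non-zero entry's index
 * to its value. The zero-skipping policy is an assumption.
 */
public static Map<Integer, Double> vectorToMap(RealVector vector) {
    Map<Integer, Double> map = new LinkedHashMap<>();
    for (int i = 0; i < vector.getDimension(); i++) {
        double v = vector.getEntry(i);
        if (v != 0d) {
            map.put(i, v);
        }
    }
    return map;
}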