/**
 * Classify an instance.
 *
 * @param inst the instance to predict
 * @return a prediction for the instance
 * @throws Exception if an error occurs
 */
public double classifyInstance(Instance inst) throws Exception {

  double prediction = m_zeroR.classifyInstance(inst);

  // default model?
  if (!m_SuitableData) {
    return prediction;
  }

  for (Classifier classifier : m_Classifiers) {
    double toAdd = classifier.classifyInstance(inst);
    if (Utils.isMissingValue(toAdd)) {
      throw new UnassignedClassException(
        "AdditiveRegression: base learner predicted missing value.");
    }
    toAdd *= getShrinkage();
    prediction += toAdd;
  }

  return prediction;
}
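// --- Illustrative usage sketch (not part of the original source) ---
// A minimal example of training weka.classifiers.meta.AdditiveRegression and
// calling classifyInstance() as defined above. The dataset file name, the
// shrinkage value, and the iteration count are assumptions chosen only for
// illustration.
import weka.classifiers.meta.AdditiveRegression;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class AdditiveRegressionExample {

  public static void main(String[] args) throws Exception {
    // Load a numeric-class dataset; the last attribute is assumed to be the class.
    Instances data = DataSource.read("housing.arff");   // hypothetical file
    data.setClassIndex(data.numAttributes() - 1);

    AdditiveRegression ar = new AdditiveRegression();
    ar.setShrinkage(0.5);        // scales each base learner's contribution
    ar.setNumIterations(10);     // number of boosting iterations
    ar.buildClassifier(data);

    // Final prediction = ZeroR baseline + shrinkage * sum of base-learner outputs.
    Instance first = data.instance(0);
    System.out.println("Prediction: " + ar.classifyInstance(first));
  }
}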
/**
 * Replace the class values of the instances from the current iteration
 * with residuals after predicting with the supplied classifier.
 *
 * @param data the instances to predict
 * @param c the classifier to use
 * @param useShrinkage whether shrinkage is to be applied to the model's output
 * @return a new set of instances with class values replaced by residuals
 * @throws Exception if something goes wrong
 */
private Instances residualReplace(Instances data, Classifier c,
  boolean useShrinkage) throws Exception {

  double pred, residual;
  Instances newInst = new Instances(data);

  for (int i = 0; i < newInst.numInstances(); i++) {
    pred = c.classifyInstance(newInst.instance(i));
    if (Utils.isMissingValue(pred)) {
      throw new UnassignedClassException(
        "AdditiveRegression: base learner predicted missing value.");
    }
    if (useShrinkage) {
      pred *= getShrinkage();
    }
    residual = newInst.instance(i).classValue() - pred;
    newInst.instance(i).setClassValue(residual);
  }

  return newInst;
}
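// --- Illustrative sketch (not part of the original source) ---
// residualReplace() implements the standard boosting update for squared error:
// after each iteration the class value y is replaced by y - shrinkage * f(x),
// so the next base learner is fit to what the current model still gets wrong.
// The toy target and prediction arrays below are assumptions used only to
// show that update rule in isolation.
public class ResidualUpdateSketch {

  public static void main(String[] args) {
    double[] y    = {3.0, 5.0, 7.0};   // current class values (targets)
    double[] pred = {2.5, 5.5, 6.0};   // base learner's predictions f(x)
    double shrinkage = 0.5;

    // Same update as residualReplace(): residual = classValue - shrinkage * pred
    for (int i = 0; i < y.length; i++) {
      y[i] = y[i] - shrinkage * pred[i];
      System.out.println("residual[" + i + "] = " + y[i]);
    }
    // These residuals become the class values for the next boosting iteration.
  }
}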
/**
 * Sets the format of the input instances.
 *
 * @param instanceInfo an Instances object containing the input instance
 *          structure (any instances contained in the object are ignored -
 *          only the structure is required).
 * @return true if the outputFormat may be collected immediately
 * @throws Exception if the input format can't be set successfully
 */
@Override
public boolean setInputFormat(Instances instanceInfo) throws Exception {

  super.setInputFormat(instanceInfo);
  if (instanceInfo.classIndex() < 0) {
    throw new UnassignedClassException(
      "No class has been assigned to the instances");
  }
  setOutputFormat();
  m_Indices = null;

  // The output format can be collected immediately only for a nominal class
  return instanceInfo.classAttribute().isNominal();
}
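// --- Illustrative usage sketch (not part of the original source) ---
// The general calling pattern for a Weka filter with a setInputFormat() like
// the one above: the class index must be assigned before the call, otherwise
// an UnassignedClassException is thrown. NominalToBinary is used here only as
// a stand-in for the (unnamed) filter this snippet belongs to, and the
// dataset file name is an assumption.
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import weka.filters.supervised.attribute.NominalToBinary;

public class FilterUsageSketch {

  public static void main(String[] args) throws Exception {
    Instances data = DataSource.read("iris.arff");   // hypothetical file
    data.setClassIndex(data.numAttributes() - 1);    // required: assign the class

    NominalToBinary filter = new NominalToBinary();
    filter.setInputFormat(data);                     // the method shown above
    Instances transformed = Filter.useFilter(data, filter);
    System.out.println(transformed.numAttributes() + " attributes after filtering");
  }
}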
/**
 * Converts the header info of the given set of instances into a set of item
 * sets (singletons). The ordering of values in the header file determines
 * the lexicographic order.
 *
 * @param instances the set of instances whose header info is to be used
 * @return a set of item sets, each containing a single item
 * @exception Exception if singletons can't be generated successfully
 */
public static FastVector singletons(Instances instances) throws Exception {

  FastVector setOfItemSets = new FastVector();
  ItemSet current;

  if (instances.classIndex() == -1) {
    throw new UnassignedClassException("Class index is negative (not set)!");
  }
  Attribute att = instances.classAttribute();
  for (int i = 0; i < instances.numAttributes(); i++) {
    if (instances.attribute(i).isNumeric()) {
      throw new Exception("Can't handle numeric attributes!");
    }
    if (i != instances.classIndex()) {
      for (int j = 0; j < instances.attribute(i).numValues(); j++) {
        current = new ItemSet(instances.numInstances());
        int[] currentItems = new int[instances.numAttributes()];
        for (int k = 0; k < instances.numAttributes(); k++) {
          currentItems[k] = -1;
        }
        currentItems[i] = j;
        current.setItem(currentItems);
        setOfItemSets.addElement(current);
      }
    }
  }
  return setOfItemSets;
}
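// --- Illustrative sketch (not part of the original source) ---
// What singletons() produces: one item set per value of each non-class
// attribute, encoded as an int array with one slot per attribute where -1
// means "attribute not in the item set" and a value index means
// "attribute = value". The toy weather-style header below (outlook, windy,
// class play) is an assumption used only to show that encoding.
import java.util.Arrays;

public class SingletonEncodingSketch {

  public static void main(String[] args) {
    // Attribute layout: index 0 = outlook {sunny, rainy}, index 1 = windy {true, false},
    // index 2 = class play {yes, no} (the class attribute is skipped by singletons()).
    int numAttributes = 3;
    int classIndex = 2;
    int[] numValues = {2, 2, 2};

    // Reproduce the encoding used by singletons(): -1 everywhere except the
    // single (attribute, value) pair the item set represents.
    for (int i = 0; i < numAttributes; i++) {
      if (i == classIndex) {
        continue;                       // the class attribute never becomes an item
      }
      for (int j = 0; j < numValues[i]; j++) {
        int[] items = new int[numAttributes];
        Arrays.fill(items, -1);
        items[i] = j;
        System.out.println(Arrays.toString(items));
      }
    }
  }
}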
/**
 * Calculates the class membership probabilities for the given test instance.
 *
 * @param inst the instance to be classified
 * @return predicted class probability distribution
 * @throws Exception if instance could not be classified successfully
 */
public double[] distributionForInstance(Instance inst) throws Exception {

  // default model?
  if (m_ZeroR != null) {
    return m_ZeroR.distributionForInstance(inst);
  }

  double[] Fs = new double[m_NumClasses];
  double[] pred = new double[m_NumClasses];
  Instance instance = (Instance) inst.copy();
  instance.setDataset(m_NumericClassData);
  for (int i = 0; i < m_NumGenerated; i++) {
    double predSum = 0;
    for (int j = 0; j < m_NumClasses; j++) {
      double tempPred = m_Shrinkage
        * m_Classifiers.get(i)[j].classifyInstance(instance);
      if (Utils.isMissingValue(tempPred)) {
        throw new UnassignedClassException(
          "LogitBoost: base learner predicted missing value.");
      }
      pred[j] = tempPred;
      if (m_NumClasses == 2) {
        pred[1] = -tempPred; // Can treat 2 classes as special case
        break;
      }
      predSum += pred[j];
    }
    predSum /= m_NumClasses;
    for (int j = 0; j < m_NumClasses; j++) {
      Fs[j] += (pred[j] - predSum) * (m_NumClasses - 1) / m_NumClasses;
    }
  }
  return probs(Fs);
}
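// --- Illustrative sketch (not part of the original source) ---
// probs(Fs) is not included in this excerpt. LogitBoost maps the accumulated
// scores F_j to class probabilities with the multiclass logistic link
// p_j = exp(F_j) / sum_k exp(F_k); the sketch below shows that transform with
// the usual max-subtraction for numerical stability. Treat it as an
// assumption about the helper, not a copy of the actual implementation.
public class LogitBoostProbsSketch {

  static double[] probs(double[] Fs) {
    double maxF = Double.NEGATIVE_INFINITY;
    for (double f : Fs) {
      maxF = Math.max(maxF, f);        // subtract the max before exponentiating
    }
    double sum = 0;
    double[] p = new double[Fs.length];
    for (int j = 0; j < Fs.length; j++) {
      p[j] = Math.exp(Fs[j] - maxF);
      sum += p[j];
    }
    for (int j = 0; j < Fs.length; j++) {
      p[j] /= sum;                     // normalize to a probability distribution
    }
    return p;
  }

  public static void main(String[] args) {
    double[] Fs = {1.2, -0.3, 0.1};    // example scores for three classes
    for (double p : probs(Fs)) {
      System.out.println(p);
    }
  }
}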
/**
 * Returns the distribution for an instance.
 *
 * @param inst the instance to get the distribution for
 * @return the computed distribution
 * @throws Exception if the distribution can't be computed successfully
 */
public double[] distributionForInstance(Instance inst) throws Exception {

  double[] probs = new double[inst.numClasses()];
  Instance newInst;
  double sum = 0;

  for (int i = 0; i < inst.numClasses(); i++) {
    m_ClassFilters[i].input(inst);
    m_ClassFilters[i].batchFinished();
    newInst = m_ClassFilters[i].output();
    probs[i] = m_Classifiers[i].classifyInstance(newInst);
    if (Utils.isMissingValue(probs[i])) {
      throw new UnassignedClassException(
        "ClassificationViaRegression: base learner predicted missing value.");
    }
    if (probs[i] > 1) {
      probs[i] = 1;
    }
    if (probs[i] < 0) {
      probs[i] = 0;
    }
    sum += probs[i];
  }
  if (sum != 0) {
    Utils.normalize(probs, sum);
  }
  return probs;
}
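// --- Illustrative usage sketch (not part of the original source) ---
// How a caller might reach the distributionForInstance() shown above:
// ClassificationViaRegression turns a k-class problem into k regression
// problems (one indicator target per class) and normalizes the clipped
// regression outputs into a distribution. The dataset file name and the M5P
// base learner are assumptions chosen for illustration.
import weka.classifiers.meta.ClassificationViaRegression;
import weka.classifiers.trees.M5P;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassificationViaRegressionExample {

  public static void main(String[] args) throws Exception {
    Instances data = DataSource.read("iris.arff");   // hypothetical file
    data.setClassIndex(data.numAttributes() - 1);

    ClassificationViaRegression cvr = new ClassificationViaRegression();
    cvr.setClassifier(new M5P());                    // regression base learner
    cvr.buildClassifier(data);

    double[] dist = cvr.distributionForInstance(data.instance(0));
    for (int i = 0; i < dist.length; i++) {
      System.out.println(data.classAttribute().value(i) + ": " + dist[i]);
    }
  }
}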