#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.ConstantsOptimization {
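  /// <summary>
  /// Optimizes the numeric constants of symbolic expression trees with the Levenberg-Marquardt (LM)
  /// algorithm, using AutoDiff terms for gradients and ALGLIB's minlm solver for the least-squares fit.
  /// </summary>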
  public class LMConstantsOptimizer {

    private LMConstantsOptimizer() { }

    /// <summary>
    /// Determines whether the numeric constants of the tree can be optimized. This depends primarily on the symbols occurring in the tree.
    /// </summary>
    /// <param name="tree">The tree that should be analyzed.</param>
    /// <returns>A flag indicating whether the numeric constants of the tree can be optimized.</returns>
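    /// <example>
    /// A minimal sketch (assuming <c>tree</c> is some symbolic expression tree provided by the caller):
    /// <code>
    /// bool optimizable = LMConstantsOptimizer.CanOptimizeConstants(tree);
    /// </code>
    /// </example>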
    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return AutoDiffConverter.IsCompatible(tree);
    }

    /// <summary>
    /// Optimizes the numeric constants in a symbolic expression tree in place.
    /// </summary>
    /// <param name="tree">The tree for which the constants should be optimized.</param>
    /// <param name="dataset">The dataset containing the data.</param>
    /// <param name="targetVariable">The target variable name.</param>
    /// <param name="rows">The rows for which the data should be extracted.</param>
    /// <param name="applyLinearScaling">A flag that determines whether linear scaling should be applied during the optimization.</param>
    /// <param name="maxIterations">The maximum number of iterations of the Levenberg-Marquardt algorithm.</param>
    /// <returns>The R² of the tree evaluated on the given rows with the optimized constants.</returns>
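    /// <example>
    /// A minimal usage sketch (assuming <c>tree</c> and <c>dataset</c> come from the surrounding
    /// problem and the target variable is named "y"; <c>dataset.Rows</c> is the number of rows):
    /// <code>
    /// if (LMConstantsOptimizer.CanOptimizeConstants(tree)) {
    ///   double r2 = LMConstantsOptimizer.OptimizeConstants(tree, dataset, "y",
    ///     Enumerable.Range(0, dataset.Rows), applyLinearScaling: true);
    /// }
    /// </code>
    /// </example>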
    public static double OptimizeConstants(ISymbolicExpressionTree tree,
      IDataset dataset, string targetVariable, IEnumerable<int> rows,
      bool applyLinearScaling, int maxIterations = 10) {
      if (tree == null) throw new ArgumentNullException(nameof(tree));
      if (dataset == null) throw new ArgumentNullException(nameof(dataset));
      if (!dataset.ContainsVariable(targetVariable)) throw new ArgumentException("The dataset does not contain the provided target variable.");

      var allVariables = Util.ExtractVariables(tree);
      var numericNodes = Util.ExtractNumericNodes(tree);

      AutoDiff.IParametricCompiledTerm term;
      if (!AutoDiffConverter.TryConvertToAutoDiff(tree, applyLinearScaling, numericNodes, allVariables, out term))
        throw new NotSupportedException("Could not convert the symbolic expression tree to an AutoDiff term because the tree contains unsupported symbols.");

      // Variables of the symbolic expression tree correspond to parameters of the AutoDiff term.
      // Hence, if no parameters are present there is nothing to optimize and R² stays the same.
      if (term.Parameters.Count == 0) return 0.0;

      var initialConstants = Util.ExtractConstants(numericNodes, applyLinearScaling);
      double[] constants;
      double[,] x = Util.ExtractData(dataset, allVariables, rows);
      double[] y = dataset.GetDoubleValues(targetVariable, rows).ToArray();

      var result = OptimizeConstants(term, initialConstants, x, y, maxIterations, out constants);
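      // write the optimized values back into the tree only if the optimization
      // succeeded (R² > 0) and actually produced constants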
      if (result > 0.0 && constants.Length != 0)
        Util.UpdateConstants(numericNodes, constants);

      return result;
    }

    /// <summary>
    /// Optimizes the numeric coefficients of an AutoDiff term using the Levenberg-Marquardt algorithm.
    /// </summary>
    /// <param name="term">The AutoDiff term for which the numeric coefficients should be optimized.</param>
    /// <param name="initialConstants">The starting values for the numeric coefficients.</param>
    /// <param name="x">The input data for the optimization.</param>
    /// <param name="y">The target values for the optimization.</param>
    /// <param name="maxIterations">The maximum number of iterations of the Levenberg-Marquardt algorithm.</param>
    /// <param name="constants">The optimized constants.</param>
    /// <param name="LM_IterationCallback">An optional callback for detailed analysis that is called in each iteration of the algorithm.</param>
    /// <returns>The R² of the term evaluated on the input data x and the target data y using the optimized constants.</returns>
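    /// <example>
    /// A sketch of attaching a per-iteration logging callback (the callback receives the current
    /// point, the current function value, and the user object passed to the optimizer):
    /// <code>
    /// double[] optimized;
    /// double r2 = LMConstantsOptimizer.OptimizeConstants(term, initialConstants, x, y, 10,
    ///   out optimized, (point, value, obj) => Console.WriteLine(value));
    /// </code>
    /// </example>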
    public static double OptimizeConstants(AutoDiff.IParametricCompiledTerm term, double[] initialConstants, double[,] x, double[] y,
      int maxIterations, out double[] constants, Action<double[], double, object> LM_IterationCallback = null) {

      if (term.Parameters.Count == 0) {
        constants = new double[0];
        return 0.0;
      }

      var optimizedConstants = (double[])initialConstants.Clone();
      int numberOfRows = x.GetLength(0);
      int numberOfColumns = x.GetLength(1);
      int numberOfConstants = optimizedConstants.Length;

      alglib.minlmstate state;
      alglib.minlmreport rep;
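      // wrap the optional user callback in ALGLIB's reporting delegate; reporting is
      // only switched on below when a callback was actually provided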
      alglib.ndimensional_rep xrep = (p, f, obj) => LM_IterationCallback(p, f, obj);

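      // Levenberg-Marquardt with an analytic Jacobian (minlmcreatevj); all tolerances
      // are zero, so the iteration limit is the effective stopping criterion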
      try {
        alglib.minlmcreatevj(numberOfRows, optimizedConstants, state: out state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxIterations);
        alglib.minlmsetxrep(state, LM_IterationCallback != null);
        alglib.minlmoptimize(state, Evaluate, EvaluateGradient, xrep, new object[] { term, x, y });
        alglib.minlmresults(state, out optimizedConstants, out rep);
      } catch (ArithmeticException) {
        constants = new double[0];
        return double.NaN;
      } catch (alglib.alglibexception) {
        constants = new double[0];
        return double.NaN;
      }

      // a negative termination type signals an error; keep the initial constants
      if (rep.terminationtype < 0) {
        constants = initialConstants;
        return 0.0;
      }
      constants = optimizedConstants;

      // calculate predictions with the optimized constants to compute R²
      double[] pred = new double[numberOfRows];
      double[] zeros = new double[numberOfRows];
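      // passing zeros as the "targets" makes Evaluate return the raw predictions,
      // because Evaluate computes residuals fi[i] = term(x_i) - y[i]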
      Evaluate(constants, pred, new object[] { term, x, zeros });
      var r = OnlinePearsonsRCalculator.Calculate(pred, y, out OnlineCalculatorError error);
      if (error != OnlineCalculatorError.None) r = 0;
      return r * r;
    }

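    // Residual callback for ALGLIB: computes fi[i] = term(x_i) - y[i].
    // The object parameter carries { term, x, y } through the optimizer.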
    private static void Evaluate(double[] c, double[] fi, object o) {
      var objs = (object[])o;
      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)objs[0];
      var x = (double[,])objs[1];
      var y = (double[])objs[2];
      double[] xi = new double[x.GetLength(1)];
      for (int i = 0; i < fi.Length; i++) {
        // copy data row
        for (int j = 0; j < xi.Length; j++) xi[j] = x[i, j];
        fi[i] = term.Evaluate(c, xi) - y[i];
      }
    }

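    // Residual and Jacobian callback for ALGLIB: in addition to the residuals,
    // fills jac[i, j] with the partial derivative of residual i w.r.t. constant j,
    // obtained from AutoDiff's Differentiate (which returns the gradient and the value).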
    private static void EvaluateGradient(double[] c, double[] fi, double[,] jac, object o) {
      var objs = (object[])o;
      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)objs[0];
      var x = (double[,])objs[1];
      var y = (double[])objs[2];
      double[] xi = new double[x.GetLength(1)];
      for (int i = 0; i < fi.Length; i++) {
        // copy data row
        for (int j = 0; j < xi.Length; j++) xi[j] = x[i, j];
        Tuple<double[], double> result = term.Differentiate(c, xi);
        fi[i] = result.Item2 - y[i];
        var g = result.Item1;
        // copy gradient to Jacobian
        for (int j = 0; j < c.Length; j++) {
          jac[i, j] = g[j];
        }
      }
    }
  }
}