
Timestamp: 03/16/19 12:49:37
Author: gkronber
Message:

#2974: changed LMConstantsOptimizer to use the minlm interface of alglib instead of the lsfit interface.
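Unlike lsfit, which is handed the data matrix plus a per-point model function, the minlm interface is a general nonlinear least-squares optimizer: the caller supplies a residual vector fi and its Jacobian, and threads the data through the opaque obj parameter. The following is a minimal, self-contained sketch of the same call sequence on a toy exponential fit. It assumes the ALGLIB C# bindings as bundled with HeuristicLab at the time (note the four-tolerance minlmsetcond overload, matching the diff below); current ALGLIB releases may expose slightly different signatures.

    using System;

    // Sketch only: fit y = c0 * exp(c1 * x) with alglib.minlm*,
    // mirroring the call sequence introduced in this changeset.
    class MinlmSketch {
      static readonly double[] xs = { 0.0, 1.0, 2.0 };
      static readonly double[] ys = { 1.0, 2.7, 7.4 };

      // fi[i] = model(c, xs[i]) - ys[i]; minlm minimizes sum_i fi[i]^2
      static void Residuals(double[] c, double[] fi, object obj) {
        for (int i = 0; i < fi.Length; i++)
          fi[i] = c[0] * Math.Exp(c[1] * xs[i]) - ys[i];
      }

      // jac[i, j] = d fi[i] / d c[j]; fi must be filled here as well
      static void Jacobian(double[] c, double[] fi, double[,] jac, object obj) {
        for (int i = 0; i < fi.Length; i++) {
          double e = Math.Exp(c[1] * xs[i]);
          fi[i] = c[0] * e - ys[i];
          jac[i, 0] = e;                 // d/dc0
          jac[i, 1] = c[0] * xs[i] * e;  // d/dc1
        }
      }

      static void Main() {
        double[] c = { 1.0, 0.5 };  // initial guess
        alglib.minlmcreatevj(xs.Length, c, out alglib.minlmstate state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, 100);  // stop only on iteration limit
        alglib.minlmoptimize(state, Residuals, Jacobian, null, null);  // no progress reports
        alglib.minlmresults(state, out c, out alglib.minlmreport rep);
        Console.WriteLine($"c0={c[0]:F3} c1={c[1]:F3} termination={rep.terminationtype}");
      }
    }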

File: 1 edited

Legend: unchanged (no prefix) · removed (-) · added (+)
  • branches/2974_Constants_Optimization/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/ConstantsOptimization/LMConstantsOptimizer.cs

r16514 → r16689

       //Variables of the symbolic expression tree correspond to parameters in the term
-      //Hence if no parameters are present no variables occur in the tree and the R² = 0
+      //Hence if no parameters are present we can't do anything and R² stays the same.
       if (term.Parameters.Count == 0) return 0.0;

…
       int numberOfConstants = optimizedConstants.Length;

-      alglib.lsfitstate state;
-      alglib.lsfitreport rep;
+      alglib.minlmstate state;
+      alglib.minlmreport rep;
       alglib.ndimensional_rep xrep = (p, f, obj) => LM_IterationCallback(p, f, obj);
-      int retVal;

       try {
-        alglib.lsfitcreatefg(x, y, optimizedConstants, numberOfRows, numberOfColumns, numberOfConstants, cheapfg: false, state: out state);
-        alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
-        alglib.lsfitsetxrep(state, LM_IterationCallback != null);
-        alglib.lsfitfit(state, Evaluate, EvaluateGradient, xrep, term);
-        alglib.lsfitresults(state, out retVal, out optimizedConstants, out rep);
+        alglib.minlmcreatevj(numberOfRows, optimizedConstants, state: out state);
+        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxIterations);
+        alglib.minlmsetxrep(state, LM_IterationCallback != null);
+        alglib.minlmoptimize(state, Evaluate, EvaluateGradient, xrep, new object[] { term, x, y });
+        alglib.minlmresults(state, out optimizedConstants, out rep);
       } catch (ArithmeticException) {
         constants = new double[0];
…
       }

+      // error
+      if (rep.terminationtype < 0) {
+        constants = initialConstants; return 0;
+      }
       constants = optimizedConstants;
-      return rep.r2;
+
+      // calculate prediction with optimized constants to calculate R²
+      double[] pred = new double[numberOfRows];
+      double[] zeros = new double[numberOfRows];
+      Evaluate(constants, pred, new object[] { term, x, zeros });
+      var r = OnlinePearsonsRCalculator.Calculate(pred, y, out OnlineCalculatorError error);
+      if (error != OnlineCalculatorError.None) r = 0;
+      return r * r;
     }
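The lsfit interface reported goodness of fit directly (rep.r2 above); minlm does not, so the changeset recomputes R² as the squared Pearson correlation between predictions and targets. Note the zeros trick: Evaluate returns term(c, xi) − y[i], so passing an all-zero target vector yields the raw predictions. OnlinePearsonsRCalculator is HeuristicLab's streaming implementation; a plain, non-streaming version of the same quantity, for illustration only:

    // Squared Pearson correlation r², the quantity computed above via
    // OnlinePearsonsRCalculator (sketch; not the HeuristicLab implementation).
    static double RSquared(double[] pred, double[] y) {
      int n = pred.Length;
      double mp = 0, my = 0;
      for (int i = 0; i < n; i++) { mp += pred[i]; my += y[i]; }
      mp /= n; my /= n;
      double cov = 0, vp = 0, vy = 0;
      for (int i = 0; i < n; i++) {
        double dp = pred[i] - mp, dy = y[i] - my;
        cov += dp * dy; vp += dp * dp; vy += dy * dy;
      }
      if (vp == 0 || vy == 0) return 0;  // degenerate data; mirrors the error guard above
      double r = cov / Math.Sqrt(vp * vy);
      return r * r;
    }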

-
-    private static void Evaluate(double[] c, double[] x, ref double fx, object o) {
-      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)o;
-      fx = term.Evaluate(c, x);
+    private static void Evaluate(double[] c, double[] fi, object o) {
+      var objs = (object[])o;
+      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)objs[0];
+      var x = (double[,])objs[1];
+      var y = (double[])objs[2];
+      double[] xi = new double[x.GetLength(1)];
+      for (int i = 0; i < fi.Length; i++) {
+        // copy data row
+        for (int j = 0; j < xi.Length; j++) xi[j] = x[i, j];
+        fi[i] = term.Evaluate(c, xi) - y[i];
+      }
     }

-    private static void EvaluateGradient(double[] c, double[] x, ref double fx, double[] grad, object o) {
-      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)o;
-      Tuple<double[], double> result = term.Differentiate(c, x);
-      fx = result.Item2;
-      Array.Copy(result.Item1, grad, grad.Length);
+    private static void EvaluateGradient(double[] c, double[] fi, double[,] jac, object o) {
+      var objs = (object[])o;
+      AutoDiff.IParametricCompiledTerm term = (AutoDiff.IParametricCompiledTerm)objs[0];
+      var x = (double[,])objs[1];
+      var y = (double[])objs[2];
+      double[] xi = new double[x.GetLength(1)];
+      for (int i = 0; i < fi.Length; i++) {
+        // copy data row
+        for (int j = 0; j < xi.Length; j++) xi[j] = x[i, j];
+        Tuple<double[], double> result = term.Differentiate(c, xi);
+        fi[i] = result.Item2 - y[i];
+        var g = result.Item1;
+        // copy gradient to Jacobian
+        for (int j = 0; j < c.Length; j++) {
+          jac[i, j] = g[j];
+        }
+      }
     }
   }
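Taken together, the two rewritten callbacks implement the standard nonlinear least-squares setup that minlm expects, with Evaluate filling the residual vector and EvaluateGradient additionally filling the Jacobian row by row from AutoDiff's per-point gradients (m = numberOfRows):

    \min_{c} \sum_{i=1}^{m} f_i(c)^2, \qquad
    f_i(c) = \mathrm{term}(c, x_i) - y_i, \qquad
    J_{ij} = \frac{\partial f_i(c)}{\partial c_j}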