source: branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/NonlinearLeastSquaresVectorConstantOptimizationEvaluator.cs @ 18239

Last change on this file was 18239, checked in by pfleck, 2 years ago

#3040 Updated to newer TensorFlow.NET version.

  • Removed IL Merge from TensorFlow.NET.
  • Temporarily removed DiffSharp.
  • Changed to a locally built Attic with a specific Protobuf version that is compatible with TensorFlow.NET. (Also adapted other versions of nuget dependencies.)
File size: 9.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

#if INCLUDE_DIFFSHARP

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HEAL.Attic;
using DiffSharp.Interop.Float64;
namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [StorableType("7BC8666F-DB02-405A-9567-8378C7BBDE9A")]
  [Item("NonlinearLeastSquaresVectorConstantOptimizationEvaluator", "")]
  public class NonlinearLeastSquaresVectorConstantOptimizationEvaluator : SymbolicRegressionConstantOptimizationEvaluator {

    private const string ConstantOptimizationIterationsName = "ConstantOptimizationIterations";

    #region Parameter Properties
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsName]; }
    }
    #endregion

    #region Properties
    public int ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value.Value; }
      set { ConstantOptimizationIterationsParameter.Value.Value = value; }
    }
    #endregion

    public NonlinearLeastSquaresVectorConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates that a different or default stopping criterion is used).", new IntValue(10)));
    }

    protected NonlinearLeastSquaresVectorConstantOptimizationEvaluator(NonlinearLeastSquaresVectorConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new NonlinearLeastSquaresVectorConstantOptimizationEvaluator(this, cloner);
    }
    [StorableConstructor]
    protected NonlinearLeastSquaresVectorConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }

    protected override ISymbolicExpressionTree OptimizeConstants(
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows,
      CancellationToken cancellationToken = default(CancellationToken), EvaluationsCounter counter = null) {
      return OptimizeTree(tree,
        problemData, rows,
        ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations, UpdateVariableWeights,
        cancellationToken, counter);
    }

    public static ISymbolicExpressionTree OptimizeTree(
      ISymbolicExpressionTree tree,
      IRegressionProblemData problemData, IEnumerable<int> rows,
      bool applyLinearScaling, int maxIterations, bool updateVariableWeights,
      CancellationToken cancellationToken = default(CancellationToken), EvaluationsCounter counter = null, Action<double[], double, object> iterationCallback = null) {

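      // Build the loss as a differentiable function of the constant vector:
      // each row is evaluated with DiffSharp types (D = scalar, DV = vector),
      // so the MSE below can be differentiated with respect to the constants.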
      DV targets = new DV(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows).ToArray());
      DV Predictions(DV variables) {
        var predictions = rows.Select(row => {
          var scalarParams = problemData.AllowedInputVariables.Intersect(problemData.Dataset.DoubleVariables)
            .ToDictionary(var => var, var => new D(problemData.Dataset.GetDoubleValue(var, row)));
          var vectorParams = problemData.AllowedInputVariables.Intersect(problemData.Dataset.DoubleVectorVariables)
            .ToDictionary(var => var, var => new DV(problemData.Dataset.GetDoubleVectorValue(var, row).ToArray()));

          return TreeToDiffSharpConverter.Evaluate(tree,
            updateVariableWeights, applyLinearScaling,
            variables,
            scalarParams, vectorParams);
        }).ToArray();
        return new DV(predictions);
      }
      D Mse(DV vars) => DV.Mean(DV.Pow(targets - Predictions(vars), 2));

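      // DiffSharp's interop API yields gradient and Hessian functions of the
      // loss through automatic differentiation.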
      Func<DV, D> loss = Mse;
      Func<DV, DV> lossGrad = AD.Grad(loss);
      Func<DV, DM> lossHess = AD.Hessian(loss);

      var initialConstants = TreeToDiffSharpConverter.GetInitialConstants(tree,
        updateVariableWeights, applyLinearScaling,
        problemData.AllowedInputVariables.Intersect(problemData.Dataset.DoubleVariables)
          .ToDictionary(var => var, var => new D(problemData.Dataset.GetDoubleValue(var, rows.First()))),
        problemData.AllowedInputVariables.Intersect(problemData.Dataset.DoubleVectorVariables)
          .ToDictionary(var => var, var => new DV(problemData.Dataset.GetDoubleVectorValue(var, rows.First()).ToArray())));

      // extract initial constants
      double[] c = initialConstants.ToArray();

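      // ALGLIB callbacks: each evaluates the DiffSharp loss (and, where needed,
      // its gradient/Hessian) at the current constant vector x and counts the
      // evaluations on the EvaluationsCounter passed through the opaque object o.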
      alglib.ndimensional_func alglib_func = (double[] x, ref double func, object o) => {
        var vars = new DV(x);
        func = (double)loss(vars);
        var cnt = (EvaluationsCounter)o;
        cnt.FunctionEvaluations++;
      };
      alglib.ndimensional_grad alglib_grad = (double[] x, ref double func, double[] grad, object o) => {
        var vars = new DV(x);
        func = (double)loss(vars);
        Array.Copy((double[])lossGrad(vars), grad, x.Length);
        var cnt = (EvaluationsCounter)o;
        cnt.GradientEvaluations++;
      };
      alglib.ndimensional_hess alglib_hess = (double[] x, ref double func, double[] grad, double[,] hess, object o) => {
        var vars = new DV(x);
        func = (double)loss(vars);
        Array.Copy((double[])lossGrad(vars), grad, x.Length);
        Array.Copy((double[,])lossHess(vars), hess, x.Length * x.Length);
        var cnt = (EvaluationsCounter)o;
        cnt.HessianEvaluations++;
      };
      alglib.ndimensional_rep xrep = (p, f, obj) => {
        iterationCallback?.Invoke(p, f, obj);
        cancellationToken.ThrowIfCancellationRequested();
      };
      var rowEvaluationsCounter = new EvaluationsCounter();

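      // Levenberg-Marquardt in FGH mode (analytic function, gradient, Hessian).
      // The eps tolerances are zero, so the iteration limit is the controlling
      // stopping criterion; reports are enabled only when a callback or
      // cancellation token needs to fire between iterations.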
      try {
        alglib.minlmcreatefgh(c, out var state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxIterations);
        alglib.minlmsetxrep(state, iterationCallback != null || cancellationToken != default(CancellationToken));
        alglib.minlmoptimize(state, alglib_func, alglib_grad, alglib_hess, xrep, rowEvaluationsCounter);
        alglib.minlmresults(state, out c, out var rep);

        ////retVal == -7  => constant optimization failed due to wrong gradient
        //if (retVal == -1)
        //  return (ISymbolicExpressionTree)tree.Clone();
      } catch (ArithmeticException) {
        return (ISymbolicExpressionTree)tree.Clone();
      } catch (alglib.alglibexception) {
        return (ISymbolicExpressionTree)tree.Clone();
      }

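      // Normalize the accumulated callback counts by the number of rows
      // (per-row accounting, matching the scalar NLS evaluator's convention).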
      if (counter != null) {
        int n = rows.Count();
        counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
        counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;
        counter.HessianEvaluations += rowEvaluationsCounter.HessianEvaluations / n;
      }

      var newTree = (ISymbolicExpressionTree)tree.Clone();
      UpdateConstants(newTree, c, updateVariableWeights);

      return newTree;
    }

    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
      // Write the optimized values back in the same prefix order in which
      // GetInitialConstants extracted them.
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
        if (constantTreeNode != null)
          constantTreeNode.Value = constants[i++];
        else if (updateVariableWeights && variableTreeNodeBase != null)
          variableTreeNodeBase.Weight = constants[i++];
        else if (factorVarTreeNode != null) {
          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
            factorVarTreeNode.Weights[j] = constants[i++];
        }
      }
    }

    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return TreeToDiffSharpConverter.IsCompatible(tree);
    }
  }
}

#endif
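
For orientation, a minimal sketch of how the static entry point might be invoked from client code. This is an illustration only: the tree and problemData instances are assumed to exist already, TrainingIndices is the usual row range of the problem data, and the remaining arguments mirror the defaults above.

// Hypothetical caller; tree and problemData are assumed to be set up elsewhere.
if (NonlinearLeastSquaresVectorConstantOptimizationEvaluator.CanOptimizeConstants(tree)) {
  var optimizedTree = NonlinearLeastSquaresVectorConstantOptimizationEvaluator.OptimizeTree(
    tree, problemData, problemData.TrainingIndices,
    applyLinearScaling: true, maxIterations: 10, updateVariableWeights: true);
  // optimizedTree is a clone of tree whose constants were replaced by the optimized values.
}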