
source: trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/ConstantOptimizationAnalyzer.cs @ 14783

Last change on this file since 14783 was 14185, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 8.3 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  /// <summary>
  /// An operator that optimizes the constants for the best symbolic expression trees in the current generation.
  /// </summary>
  [Item("ConstantOptimizationAnalyzer", "An operator that performs a constant optimization on the best symbolic expression trees.")]
  [StorableClass]
  public sealed class ConstantOptimizationAnalyzer : SymbolicDataAnalysisSingleObjectiveAnalyzer, IStatefulItem {
    private const string PercentageOfBestSolutionsParameterName = "PercentageOfBestSolutions";
    private const string ConstantOptimizationEvaluatorParameterName = "ConstantOptimizationOperator";

    private const string DataTableNameConstantOptimizationImprovement = "Constant Optimization Improvement";
    private const string DataRowNameMinimumImprovement = "Minimum improvement";
    private const string DataRowNameMedianImprovement = "Median improvement";
    private const string DataRowNameAverageImprovement = "Average improvement";
    private const string DataRowNameMaximumImprovement = "Maximum improvement";

    #region parameter properties
    public IFixedValueParameter<PercentValue> PercentageOfBestSolutionsParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[PercentageOfBestSolutionsParameterName]; }
    }

    public IFixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator> ConstantOptimizationEvaluatorParameter {
      get { return (IFixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator>)Parameters[ConstantOptimizationEvaluatorParameterName]; }
    }
    #endregion

    #region properties
    public SymbolicRegressionConstantOptimizationEvaluator ConstantOptimizationEvaluator {
      get { return ConstantOptimizationEvaluatorParameter.Value; }
    }
    public double PercentageOfBestSolutions {
      get { return PercentageOfBestSolutionsParameter.Value.Value; }
    }

    private DataTable ConstantOptimizationImprovementDataTable {
      get {
        IResult result;
        ResultCollection.TryGetValue(DataTableNameConstantOptimizationImprovement, out result);
        if (result == null) return null;
        return (DataTable)result.Value;
      }
    }
    private DataRow MinimumImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameMinimumImprovement]; }
    }
    private DataRow MedianImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameMedianImprovement]; }
    }
    private DataRow AverageImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameAverageImprovement]; }
    }
    private DataRow MaximumImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameMaximumImprovement]; }
    }
    #endregion

    [StorableConstructor]
    private ConstantOptimizationAnalyzer(bool deserializing) : base(deserializing) { }
    private ConstantOptimizationAnalyzer(ConstantOptimizationAnalyzer original, Cloner cloner) : base(original, cloner) { }
    public override IDeepCloneable Clone(Cloner cloner) { return new ConstantOptimizationAnalyzer(this, cloner); }
    public ConstantOptimizationAnalyzer()
      : base() {
      Parameters.Add(new FixedValueParameter<PercentValue>(PercentageOfBestSolutionsParameterName, "The percentage of the top solutions which should be analyzed.", new PercentValue(0.1)));
      Parameters.Add(new FixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator>(ConstantOptimizationEvaluatorParameterName, "The operator used to perform the constant optimization"));

      // Changed the ActualName of the EvaluationPartitionParameter so that it matches the parameter name of symbolic regression problems.
      ConstantOptimizationEvaluator.EvaluationPartitionParameter.ActualName = "FitnessCalculationPartition";
    }


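    // State shared between two consecutive Apply() calls: the first call records the
    // qualities and sub-scope indexes of the best trees before constant optimization,
    // the second call compares them against the optimized qualities.
    // InitializeState/ClearState reset this state so that the next Apply() call starts
    // with the scheduling phase again.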
    private double[] qualitiesBeforeCoOp = null;
    private int[] scopeIndexes = null;
    void IStatefulItem.InitializeState() {
      qualitiesBeforeCoOp = null;
      scopeIndexes = null;
    }
    void IStatefulItem.ClearState() {
      qualitiesBeforeCoOp = null;
      scopeIndexes = null;
    }

    public override IOperation Apply() {
      // code executed in the first call of the analyzer
      if (qualitiesBeforeCoOp == null) {
        double[] trainingQuality;
        // sort is ascending and we take the first n% => order so that best solutions are smallest
        // sort order is determined by maximization parameter
        if (Maximization.Value) {
          // largest values must be sorted first
          trainingQuality = Quality.Select(x => -x.Value).ToArray();
        } else {
          // smallest values must be sorted first
          trainingQuality = Quality.Select(x => x.Value).ToArray();
        }
        // sort trees by training qualities
        int topN = (int)Math.Max(trainingQuality.Length * PercentageOfBestSolutions, 1);
        scopeIndexes = Enumerable.Range(0, trainingQuality.Length).ToArray();
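        // Sorting trainingQuality together with scopeIndexes keeps both arrays aligned,
        // so taking the first topN entries afterwards selects the indexes of the best
        // trees (e.g. 20 of 200 sub-scopes at the default 10%).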
        Array.Sort(trainingQuality, scopeIndexes);
        scopeIndexes = scopeIndexes.Take(topN).ToArray();
        qualitiesBeforeCoOp = scopeIndexes.Select(x => Quality[x].Value).ToArray();

        OperationCollection operationCollection = new OperationCollection();
        operationCollection.Parallel = true;
        foreach (var scopeIndex in scopeIndexes) {
          var childOperation = ExecutionContext.CreateChildOperation(ConstantOptimizationEvaluator, ExecutionContext.Scope.SubScopes[scopeIndex]);
          operationCollection.Add(childOperation);
        }

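        // Schedule the parallel constant optimization operations followed by this
        // analyzer itself; the next Apply() call then takes the analysis branch below.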
        return new OperationCollection { operationCollection, ExecutionContext.CreateOperation(this) };
      }

      // code executed to analyze the results of constant optimization
      double[] qualitiesAfterCoOp = scopeIndexes.Select(x => Quality[x].Value).ToArray();
      var qualityImprovement = qualitiesBeforeCoOp.Zip(qualitiesAfterCoOp, (b, a) => a - b).ToArray();
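      // Improvement is measured as (quality after) - (quality before); for maximized
      // objectives such as R² a positive value indicates that constant optimization helped.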

      if (!ResultCollection.ContainsKey(DataTableNameConstantOptimizationImprovement)) {
        var dataTable = new DataTable(DataTableNameConstantOptimizationImprovement);
        ResultCollection.Add(new Result(DataTableNameConstantOptimizationImprovement, dataTable));
        dataTable.VisualProperties.YAxisTitle = "R²";

        dataTable.Rows.Add(new DataRow(DataRowNameMinimumImprovement));
        MinimumImprovement.VisualProperties.StartIndexZero = true;

        dataTable.Rows.Add(new DataRow(DataRowNameMedianImprovement));
        MedianImprovement.VisualProperties.StartIndexZero = true;

        dataTable.Rows.Add(new DataRow(DataRowNameAverageImprovement));
        AverageImprovement.VisualProperties.StartIndexZero = true;

        dataTable.Rows.Add(new DataRow(DataRowNameMaximumImprovement));
        MaximumImprovement.VisualProperties.StartIndexZero = true;
      }

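      // Append this generation's improvement statistics as new points to the chart rows.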
      MinimumImprovement.Values.Add(qualityImprovement.Min());
      MedianImprovement.Values.Add(qualityImprovement.Median());
      AverageImprovement.Values.Add(qualityImprovement.Average());
      MaximumImprovement.Values.Add(qualityImprovement.Max());

      qualitiesBeforeCoOp = null;
      scopeIndexes = null;
      return base.Apply();
    }
  }
}