
source: trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/ConstantOptimizationAnalyzer.cs @ 8845

Last change on this file: changeset 8845, checked in by mkommend, 12 years ago

#1976: Corrected access modifiers in cloning and storable ctor of ConstantOptimizationAnalyzer.

File size: 7.6 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  /// <summary>
  /// An operator that optimizes the constants for the best symbolic expression trees in the current generation.
  /// </summary>
  [Item("ConstantOptimizationAnalyzer", "An operator that performs a constant optimization on the best symbolic expression trees.")]
  [StorableClass]
  public sealed class ConstantOptimizationAnalyzer : SymbolicDataAnalysisSingleObjectiveAnalyzer {
    private const string PercentageOfBestSolutionsParameterName = "PercentageOfBestSolutions";
    private const string ConstantOptimizationEvaluatorParameterName = "ConstantOptimizationOperator";

    private const string DataTableNameConstantOptimizationImprovement = "Constant Optimization Improvement";
    private const string DataRowNameMinimumImprovement = "Minimum improvement";
    private const string DataRowNameAverageImprovement = "Average improvement";
    private const string DataRowNameMaximumImprovement = "Maximum improvement";

    #region parameter properties
    public IFixedValueParameter<PercentValue> PercentageOfBestSolutionsParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[PercentageOfBestSolutionsParameterName]; }
    }

    public IFixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator> ConstantOptimizationEvaluatorParameter {
      get { return (IFixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator>)Parameters[ConstantOptimizationEvaluatorParameterName]; }
    }
    #endregion

    #region properties
    public SymbolicRegressionConstantOptimizationEvaluator ConstantOptimizationEvaluator {
      get { return ConstantOptimizationEvaluatorParameter.Value; }
    }
    public double PercentageOfBestSolutions {
      get { return PercentageOfBestSolutionsParameter.Value.Value; }
    }

    private DataTable ConstantOptimizationImprovementDataTable {
      get {
        IResult result;
        ResultCollection.TryGetValue(DataTableNameConstantOptimizationImprovement, out result);
        if (result == null) return null;
        return (DataTable)result.Value;
      }
    }
    private DataRow MinimumImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameMinimumImprovement]; }
    }
    private DataRow AverageImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameAverageImprovement]; }
    }
    private DataRow MaximumImprovement {
      get { return ConstantOptimizationImprovementDataTable.Rows[DataRowNameMaximumImprovement]; }
    }

    #endregion

    [StorableConstructor]
    private ConstantOptimizationAnalyzer(bool deserializing) : base(deserializing) { }
    private ConstantOptimizationAnalyzer(ConstantOptimizationAnalyzer original, Cloner cloner) : base(original, cloner) { }
    public override IDeepCloneable Clone(Cloner cloner) { return new ConstantOptimizationAnalyzer(this, cloner); }
    public ConstantOptimizationAnalyzer()
      : base() {
      Parameters.Add(new FixedValueParameter<PercentValue>(PercentageOfBestSolutionsParameterName, "The percentage of the top solutions which should be analyzed.", new PercentValue(0.1)));
      Parameters.Add(new FixedValueParameter<SymbolicRegressionConstantOptimizationEvaluator>(ConstantOptimizationEvaluatorParameterName, "The operator used to perform the constant optimization."));

      //Changed the ActualName of the EvaluationPartitionParameter so that it matches the parameter name of symbolic regression problems.
      ConstantOptimizationEvaluator.EvaluationPartitionParameter.ActualName = "FitnessCalculationPartition";
    }


    private double[] qualitiesBeforeCoOp = null;
    private int[] scopeIndexes = null;
    public override IOperation Apply() {
      //code executed for first call of analyzer
      if (qualitiesBeforeCoOp == null) {

        double[] trainingQuality;
        // sort is ascending and we take the first n% => order so that best solutions are smallest
        // sort order is determined by maximization parameter
        if (Maximization.Value) {
          // largest values must be sorted first
          trainingQuality = Quality.Select(x => -x.Value).ToArray();
        } else {
          // smallest values must be sorted first
          trainingQuality = Quality.Select(x => x.Value).ToArray();
        }
        // sort trees by training qualities
        int topN = (int)Math.Max(trainingQuality.Length * PercentageOfBestSolutions, 1);
        scopeIndexes = Enumerable.Range(0, trainingQuality.Length).ToArray();
        Array.Sort(trainingQuality, scopeIndexes);
        scopeIndexes = scopeIndexes.Take(topN).ToArray();
        qualitiesBeforeCoOp = scopeIndexes.Select(x => Quality[x].Value).ToArray();

        OperationCollection operationCollection = new OperationCollection();
        operationCollection.Parallel = true;
        foreach (var scopeIndex in scopeIndexes) {
          var childOperation = ExecutionContext.CreateChildOperation(ConstantOptimizationEvaluator, ExecutionContext.Scope.SubScopes[scopeIndex]);
          operationCollection.Add(childOperation);
        }

        return new OperationCollection { operationCollection, ExecutionContext.CreateOperation(this) };
      }

      //code executed to analyze results of constant optimization
      double[] qualitiesAfterCoOp = scopeIndexes.Select(x => Quality[x].Value).ToArray();
      var qualityImprovement = qualitiesBeforeCoOp.Zip(qualitiesAfterCoOp, (b, a) => a - b).ToArray();

      if (!ResultCollection.ContainsKey(DataTableNameConstantOptimizationImprovement)) {
        var dataTable = new DataTable(DataTableNameConstantOptimizationImprovement);
        ResultCollection.Add(new Result(DataTableNameConstantOptimizationImprovement, dataTable));
        dataTable.VisualProperties.YAxisTitle = "R²";

        dataTable.Rows.Add(new DataRow(DataRowNameMinimumImprovement));
        MinimumImprovement.VisualProperties.StartIndexZero = true;

        dataTable.Rows.Add(new DataRow(DataRowNameAverageImprovement));
        AverageImprovement.VisualProperties.StartIndexZero = true;

        dataTable.Rows.Add(new DataRow(DataRowNameMaximumImprovement));
        MaximumImprovement.VisualProperties.StartIndexZero = true;
      }

      MinimumImprovement.Values.Add(qualityImprovement.Min());
      AverageImprovement.Values.Add(qualityImprovement.Average());
      MaximumImprovement.Values.Add(qualityImprovement.Max());

      qualitiesBeforeCoOp = null;
      scopeIndexes = null;
      return base.Apply();
    }
  }
}
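
The selection logic at the start of Apply() is the only non-obvious part: qualities are negated when the problem is a maximization, so that an ascending Array.Sort places the best solutions first, and only the top n% of scope indexes (at least one) are scheduled for constant optimization. The following standalone sketch reproduces just that selection step outside of HeuristicLab's operator infrastructure; the class and member names (TopSolutionSelectionDemo, SelectTopIndexes) and the parameters are hypothetical stand-ins for the analyzer's Quality values, Maximization flag, and PercentageOfBestSolutions parameter.

using System;
using System.Linq;

// Standalone sketch (hypothetical names) of the top-n% selection performed in
// ConstantOptimizationAnalyzer.Apply(); not part of the HeuristicLab sources.
class TopSolutionSelectionDemo {
  static int[] SelectTopIndexes(double[] qualities, bool maximization, double percentage) {
    // Array.Sort orders ascending, so negate the qualities when maximizing
    // to make the best solutions the smallest sort keys.
    double[] keys = maximization
      ? qualities.Select(q => -q).ToArray()
      : (double[])qualities.Clone();
    int[] indexes = Enumerable.Range(0, qualities.Length).ToArray();
    Array.Sort(keys, indexes);                                   // co-sorts the index array
    int topN = (int)Math.Max(qualities.Length * percentage, 1);  // always keep at least one
    return indexes.Take(topN).ToArray();
  }

  static void Main() {
    // Minimization of an error measure: the two smallest values win.
    var top = SelectTopIndexes(new[] { 0.9, 0.1, 0.5, 0.3 }, maximization: false, percentage: 0.5);
    Console.WriteLine(string.Join(", ", top)); // prints "1, 3"
  }
}

In the analyzer itself, the selected indexes are then used to create one child operation per sub-scope; these run in a parallel OperationCollection, after which the analyzer is scheduled again to compare the qualities before and after constant optimization and to record the minimum, average, and maximum improvement in the result table.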