Changeset 17984


Ignore:
Timestamp:
05/29/21 15:45:50 (2 weeks ago)
Author:
gkronber
Message:

#3106 updated implementation based on the reply by Moscato

Location:
branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4
Files:
1 added
6 edited

Legend:

Unmodified
Added
Removed
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/ContinuedFractionRegression/Algorithm.cs

    r17983 r17984  
    44using System.Threading;
    55using HEAL.Attic;
     6using HeuristicLab.Analysis;
    67using HeuristicLab.Common;
    78using HeuristicLab.Core;
     
    1516
    1617namespace HeuristicLab.Algorithms.DataAnalysis.ContinuedFractionRegression {
     18  /// <summary>
     19  /// Implementation of Continued Fraction Regression (CFR) as described in
     20  /// Pablo Moscato, Haoyuan Sun, Mohammad Nazmul Haque,
     21  /// Analytic Continued Fractions for Regression: A Memetic Algorithm Approach,
     22  /// Expert Systems with Applications, Volume 179, 2021, 115018, ISSN 0957-4174,
     23  /// https://doi.org/10.1016/j.eswa.2021.115018.
     24  /// </summary>
    1725  [Item("Continued Fraction Regression (CFR)", "TODO")]
    1826  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 999)]
     
    2129    private const string MutationRateParameterName = "MutationRate";
    2230    private const string DepthParameterName = "Depth";
    23     private const string NumGenerationsParameterName = "Depth";
     31    private const string NumGenerationsParameterName = "NumGenerations";
     32    private const string StagnationGenerationsParameterName = "StagnationGenerations";
     33    private const string LocalSearchIterationsParameterName = "LocalSearchIterations";
     34    private const string LocalSearchRestartsParameterName = "LocalSearchRestarts";
     35    private const string LocalSearchToleranceParameterName = "LocalSearchTolerance";
     36    private const string DeltaParameterName = "Delta";
     37    private const string ScaleDataParameterName = "ScaleData";
     38
    2439
    2540    #region parameters
     
    3954      set { NumGenerationsParameter.Value.Value = value; }
    4055    }
     56    public IFixedValueParameter<IntValue> StagnationGenerationsParameter => (IFixedValueParameter<IntValue>)Parameters[StagnationGenerationsParameterName];
     57    public int StagnationGenerations {
     58      get { return StagnationGenerationsParameter.Value.Value; }
     59      set { StagnationGenerationsParameter.Value.Value = value; }
     60    }
     61    public IFixedValueParameter<IntValue> LocalSearchIterationsParameter => (IFixedValueParameter<IntValue>)Parameters[LocalSearchIterationsParameterName];
     62    public int LocalSearchIterations {
     63      get { return LocalSearchIterationsParameter.Value.Value; }
     64      set { LocalSearchIterationsParameter.Value.Value = value; }
     65    }
     66    public IFixedValueParameter<IntValue> LocalSearchRestartsParameter => (IFixedValueParameter<IntValue>)Parameters[LocalSearchRestartsParameterName];
     67    public int LocalSearchRestarts {
     68      get { return LocalSearchRestartsParameter.Value.Value; }
     69      set { LocalSearchRestartsParameter.Value.Value = value; }
     70    }
     71    public IFixedValueParameter<DoubleValue> LocalSearchToleranceParameter => (IFixedValueParameter<DoubleValue>)Parameters[LocalSearchToleranceParameterName];
     72    public double LocalSearchTolerance {
     73      get { return LocalSearchToleranceParameter.Value.Value; }
     74      set { LocalSearchToleranceParameter.Value.Value = value; }
     75    }
     76    public IFixedValueParameter<PercentValue> DeltaParameter => (IFixedValueParameter<PercentValue>)Parameters[DeltaParameterName];
     77    public double Delta {
     78      get { return DeltaParameter.Value.Value; }
     79      set { DeltaParameter.Value.Value = value; }
     80    }
     81    public IFixedValueParameter<BoolValue> ScaleDataParameter => (IFixedValueParameter<BoolValue>)Parameters[ScaleDataParameterName];
     82    public bool ScaleData {
     83      get { return ScaleDataParameter.Value.Value; }
     84      set { ScaleDataParameter.Value.Value = value; }
     85    }
    4186    #endregion
    4287
     
    5196    // default ctor
    5297    public Algorithm() : base() {
     98      Problem = new RegressionProblem();
    5399      Parameters.Add(new FixedValueParameter<PercentValue>(MutationRateParameterName, "Mutation rate (default 10%)", new PercentValue(0.1)));
    54100      Parameters.Add(new FixedValueParameter<IntValue>(DepthParameterName, "Depth of the continued fraction representation (default 6)", new IntValue(6)));
    55101      Parameters.Add(new FixedValueParameter<IntValue>(NumGenerationsParameterName, "The maximum number of generations (default 200)", new IntValue(200)));
     102      Parameters.Add(new FixedValueParameter<IntValue>(StagnationGenerationsParameterName, "Number of generations after which the population is re-initialized (default value 5)", new IntValue(5)));
     103      Parameters.Add(new FixedValueParameter<IntValue>(LocalSearchIterationsParameterName, "Number of iterations for local search (simplex) (default value 250)", new IntValue(250)));
     104      Parameters.Add(new FixedValueParameter<IntValue>(LocalSearchRestartsParameterName, "Number of restarts for local search (default value 4)", new IntValue(4)));
     105      Parameters.Add(new FixedValueParameter<DoubleValue>(LocalSearchToleranceParameterName, "The tolerance value for local search (simplex) (default value: 1e-3)", new DoubleValue(1e-3)));
     106      Parameters.Add(new FixedValueParameter<PercentValue>(DeltaParameterName, "The relative weight for the number of variables term in the fitness function (default value: 10%)", new PercentValue(0.1)));
     107      Parameters.Add(new FixedValueParameter<BoolValue>(ScaleDataParameterName, "Turns on/off scaling of input variable values to the range [0 .. 1] (default: false)", new BoolValue(false)));
    56108    }
    57109
    58110    public override IDeepCloneable Clone(Cloner cloner) {
    59       throw new NotImplementedException();
     111      return new Algorithm(this, cloner);
    60112    }
    61113
    62114    protected override void Run(CancellationToken cancellationToken) {
    63115      var problemData = Problem.ProblemData;
    64 
    65       var x = problemData.Dataset.ToArray(problemData.AllowedInputVariables.Concat(new[] { problemData.TargetVariable }),
    66         problemData.TrainingIndices);
    67       var nVars = x.GetLength(1) - 1;
     116      double[,] xy;
     117      if (ScaleData) {
     118        // Scale data to range 0 .. 1
     119        //
     120        // Scaling was not used for the experiments in the paper. Statement by the authors: "We did not pre-process the data."
     121        var transformations = new List<Transformation<double>>();
     122        foreach (var input in problemData.AllowedInputVariables) {
     123          var values = problemData.Dataset.GetDoubleValues(input, problemData.TrainingIndices);
     124          var linTransformation = new LinearTransformation(problemData.AllowedInputVariables);
     125          var min = values.Min();
     126          var max = values.Max();
     127          var range = max - min;
     128          linTransformation.Addend = -min / range;
     129          linTransformation.Multiplier = 1.0 / range;
     130          transformations.Add(linTransformation);
     131        }
     132        // do not scale the target
     133        transformations.Add(new LinearTransformation(problemData.AllowedInputVariables) { Addend = 0.0, Multiplier = 1.0 });
     134        xy = problemData.Dataset.ToArray(problemData.AllowedInputVariables.Concat(new[] { problemData.TargetVariable }),
     135          transformations,
     136          problemData.TrainingIndices);
     137      } else {
     138        // no transformation
     139        xy = problemData.Dataset.ToArray(problemData.AllowedInputVariables.Concat(new[] { problemData.TargetVariable }),
     140          problemData.TrainingIndices);
     141      }
     142      var nVars = xy.GetLength(1) - 1;
    68143      var seed = new System.Random().Next();
    69144      var rand = new MersenneTwister((uint)seed);
    70       CFRAlgorithm(nVars, Depth, MutationRate, x, out var best, out var bestObj, rand, NumGenerations, stagnatingGens: 5, cancellationToken);
     145      CFRAlgorithm(nVars, Depth, MutationRate, xy, out var bestObj, rand, NumGenerations, StagnationGenerations,
     146        Delta,
     147        LocalSearchIterations, LocalSearchRestarts, LocalSearchTolerance, cancellationToken);
    71148    }
    72149
    73150    private void CFRAlgorithm(int nVars, int depth, double mutationRate, double[,] trainingData,
    74       out ContinuedFraction best, out double bestObj,
    75       IRandom rand, int numGen, int stagnatingGens,
     151      out double bestObj,
     152      IRandom rand, int numGen, int stagnatingGens, double evalDelta,
     153      int localSearchIterations, int localSearchRestarts, double localSearchTolerance,
    76154      CancellationToken cancellationToken) {
    77155      /* Algorithm 1 */
    78156      /* Generate initial population by a randomized algorithm */
    79157      var pop = InitialPopulation(nVars, depth, rand, trainingData);
    80       best = pop.pocket;
    81158      bestObj = pop.pocketObjValue;
    82       var bestObjGen = 0;
     159      // the best value since the last reset
     160      var episodeBestObj = pop.pocketObjValue;
     161      var episodeBestObjGen = 0;
    83162      for (int gen = 1; gen <= numGen && !cancellationToken.IsCancellationRequested; gen++) {
    84163        /* mutate each current solution in the population */
     
    87166        var pop_r = RecombinePopulation(pop_mu, rand, nVars);
    88167
     168        // "We ran the Local Search after Mutation and recombination operations. We executed the local-search only on the Current solutions."
     169        // "We executed the MaintainInvariant() in the following steps:
     170        // - After generating the initial population
     171        // - after resetting the root
     172        // - after executing the local-search on the whole population.
     173        // We updated the pocket/ current automatically after mutation and recombination operation."
     174
    89175        /* local search optimization of current solutions */
    90176        foreach (var agent in pop_r.IterateLevels()) {
    91           LocalSearchSimplex(agent.current, ref agent.currentObjValue, trainingData, rand); // CHECK paper states that pocket might also be optimized. Unclear how / when invariants are maintained.
    92         }
    93 
    94         foreach (var agent in pop_r.IteratePostOrder()) agent.MaintainInvariant(); // CHECK deviates from Alg1 in paper
     177          LocalSearchSimplex(localSearchIterations, localSearchRestarts, localSearchTolerance, evalDelta, agent.current, ref agent.currentObjValue, trainingData, rand);
     178        }
     179        foreach (var agent in pop_r.IteratePostOrder()) agent.MaintainInvariant(); // post-order to make sure that the root contains the best model
     180
     181
     182        // for detecting stagnation we track the best objective value since the last reset
     183        // and reset if this does not change for stagnatingGens
     184        if (gen > episodeBestObjGen + stagnatingGens) {
     185          Reset(pop_r, nVars, depth, rand, trainingData);
     186          episodeBestObj = double.MaxValue;
     187        }
     188        if (episodeBestObj > pop_r.pocketObjValue) {
     189          episodeBestObjGen = gen; // wait at least stagnatingGens until resetting again
     190          episodeBestObj = pop_r.pocketObjValue;
     191        }
    95192
    96193        /* replace old population with evolved population */
     
    98195
    99196        /* keep track of the best solution */
    100         if (bestObj > pop.pocketObjValue) { // CHECK: comparison obviously wrong in the paper
    101           best = pop.pocket;
     197        if (bestObj > pop.pocketObjValue) {
    102198          bestObj = pop.pocketObjValue;
    103           bestObjGen = gen;
    104           // Results.AddOrUpdateResult("MSE (best)", new DoubleValue(bestObj));
    105           // Results.AddOrUpdateResult("Solution", CreateSymbolicRegressionSolution(best, Problem.ProblemData));
    106         }
    107 
    108 
    109         if (gen > bestObjGen + stagnatingGens) {
    110           bestObjGen = gen; // CHECK: unspecified in the paper: wait at least stagnatingGens until resetting again
    111           Reset(pop, nVars, depth, rand, trainingData);
    112           // InitialPopulation(nVars, depth, rand, trainingData); CHECK reset is not specified in the paper
    113         }
     199          Results.AddOrUpdateResult("MSE (best)", new DoubleValue(bestObj));
     200          Results.AddOrUpdateResult("Solution", CreateSymbolicRegressionSolution(pop.pocket, trainingData, Problem.ProblemData.AllowedInputVariables.ToArray(), Problem.ProblemData.TargetVariable));
     201        }
     202
     203        #region visualization and debugging
     204        DataTable qualities;
     205        int i = 0;
     206        if (Results.TryGetValue("Qualities", out var qualitiesResult)) {
     207          qualities = (DataTable)qualitiesResult.Value;
     208        } else {
     209          qualities = new DataTable("Qualities", "Qualities");
     210          i = 0;
     211          foreach (var node in pop.IterateLevels()) {
     212            qualities.Rows.Add(new DataRow($"Quality {i} pocket", "Quality of pocket"));
     213            qualities.Rows.Add(new DataRow($"Quality {i} current", "Quality of current"));
     214            i++;
     215          }
     216          Results.AddOrUpdateResult("Qualities", qualities);
     217        }
     218        i = 0;
     219        foreach (var node in pop.IterateLevels()) {
     220          qualities.Rows[$"Quality {i} pocket"].Values.Add(node.pocketObjValue);
     221          qualities.Rows[$"Quality {i} current"].Values.Add(node.currentObjValue);
     222          i++;
     223        }
     224        #endregion
    114225      }
    115226    }
     
    128239      }
    129240
     241      // Statement by the authors: "Yes, we use post-order traversal here"
    130242      foreach (var agent in pop.IteratePostOrder()) {
    131243        agent.current = new ContinuedFraction(nVars, depth, rand);
    132244        agent.pocket = new ContinuedFraction(nVars, depth, rand);
    133245
    134         agent.currentObjValue = Evaluate(agent.current, trainingData);
    135         agent.pocketObjValue = Evaluate(agent.pocket, trainingData);
     246        agent.currentObjValue = Evaluate(agent.current, trainingData, Delta);
     247        agent.pocketObjValue = Evaluate(agent.pocket, trainingData, Delta);
    136248
    137249        /* within each agent, the pocket solution always holds the better value of guiding
     
    143255    }
    144256
    145     // TODO: reset is not described in the paper
     257    // Reset is not described in detail in the paper.
     258    // Statement by the authors: "We only replaced the pocket solution of the root with
     259    // a randomly generated solution. Then we execute the maintain-invariant process.
     260    // It does not initialize the solutions in the entire population."
    146261    private void Reset(Agent root, int nVars, int depth, IRandom rand, double[,] trainingData) {
    147262      root.pocket = new ContinuedFraction(nVars, depth, rand);
    148263      root.current = new ContinuedFraction(nVars, depth, rand);
    149264
    150       root.currentObjValue = Evaluate(root.current, trainingData);
    151       root.pocketObjValue = Evaluate(root.pocket, trainingData);
    152 
    153       /* within each agent, the pocket solution always holds the better value of guiding
    154        * function than its current solution
    155        */
    156       root.MaintainInvariant();
     265      root.currentObjValue = Evaluate(root.current, trainingData, Delta);
     266      root.pocketObjValue = Evaluate(root.pocket, trainingData, Delta);
     267
     268      foreach (var agent in root.IteratePreOrder()) { agent.MaintainInvariant(); } // Here we push the newly created model down the hierarchy.
    157269    }
    158270
     
    167279        var s3 = pop.children[2];
    168280
    169         // CHECK Deviates from paper (recombine all models in the current pop before updating the population)
    170         var l_current = Recombine(l.pocket, s1.current, SelectRandomOp(rand), rand, nVars);
    171         var s3_current = Recombine(s3.pocket, l.current, SelectRandomOp(rand), rand, nVars);
    172         var s1_current = Recombine(s1.pocket, s2.current, SelectRandomOp(rand), rand, nVars);
    173         var s2_current = Recombine(s2.pocket, s3.current, SelectRandomOp(rand), rand, nVars);
     281        // Statement by the authors: "we are using recently generated solutions.
     282        // For an example, in step 1 we got the new current(l), which is being used in
     283        // Step 2 to generate current(s3). The current(s3) from Step 2 is being used at
     284        // Step 4. These steps are executed sequentially from 1 to 4. Similarly, in the
     285        // recombination of lower-level subpopulations, we will have the new current
     286        // (the supporters generated at the previous level) as the leader of the subpopulation.
     287        l.current = Recombine(l.pocket, s1.current, SelectRandomOp(rand), rand, nVars);
     288        s3.current = Recombine(s3.pocket, l.current, SelectRandomOp(rand), rand, nVars);
     289        s1.current = Recombine(s1.pocket, s2.current, SelectRandomOp(rand), rand, nVars);
     290        s2.current = Recombine(s2.pocket, s3.current, SelectRandomOp(rand), rand, nVars);
    174291
    175292        // recombination works from top to bottom
    176         // CHECK do we use the new current solutions (s1_current .. s3_current) already in the next levels?
    177293        foreach (var child in pop.children) {
    178294          RecombinePopulation(child, rand, nVars);
    179295        }
    180296
    181         l.current = l_current;
    182         s3.current = s3_current;
    183         s1.current = s1_current;
    184         s2.current = s2_current;
    185297      }
    186298      return pop;
    187     }
    188 
    189     private Func<bool[], bool[], bool[]> SelectRandomOp(IRandom rand) {
    190       bool[] union(bool[] a, bool[] b) {
    191         var res = new bool[a.Length];
    192         for (int i = 0; i < a.Length; i++) res[i] = a[i] || b[i];
    193         return res;
    194       }
    195       bool[] intersect(bool[] a, bool[] b) {
    196         var res = new bool[a.Length];
    197         for (int i = 0; i < a.Length; i++) res[i] = a[i] && b[i];
    198         return res;
    199       }
    200       bool[] symmetricDifference(bool[] a, bool[] b) {
    201         var res = new bool[a.Length];
    202         for (int i = 0; i < a.Length; i++) res[i] = a[i] ^ b[i];
    203         return res;
    204       }
    205       switch (rand.Next(3)) {
    206         case 0: return union;
    207         case 1: return intersect;
    208         case 2: return symmetricDifference;
    209         default: throw new ArgumentException();
    210       }
    211299    }
    212300
     
    223311        /* recombine coefficient values for variables */
    224312        var coefx = new double[nVars];
    225         var varsx = new bool[nVars]; // CHECK: deviates from paper, probably forgotten in the pseudo-code
    226         for (int vi = 1; vi < nVars; vi++) {
     313        var varsx = new bool[nVars]; // deviates from paper, probably forgotten in the pseudo-code
     314        for (int vi = 0; vi < nVars; vi++) {
    227315          if (ch.vars[vi]) {  // CHECK: paper uses featAt()
    228316            if (varsa[vi] && varsb[vi]) {
     
    244332        ch.h[i].beta = p1.h[i].beta + (rand.NextDouble() * 5 - 1) * (p2.h[i].beta - p1.h[i].beta) / 3.0;
    245333      }
    246       /* update current solution and apply local search */
    247       // return LocalSearchSimplex(ch, trainingData); // CHECK: Deviates from paper because Alg1 also has LocalSearch after Recombination
     334      // return LocalSearchSimplex(ch, trainingData); // The paper has a local search step here.
     335      // The authors have stated that local search is executed after mutation and recombination
     336      // for the current solutions.
     337      // Local search and MaintainInvariant is called in the main loop (Alg 1)
    248338      return ch;
    249339    }
     
    253343        if (rand.NextDouble() < mutationRate) {
    254344          if (agent.currentObjValue < 1.2 * agent.pocketObjValue ||
    255              agent.currentObjValue > 2 * agent.pocketObjValue)
     345              agent.currentObjValue > 2 * agent.pocketObjValue)
    256346            ToggleVariables(agent.current, rand); // major mutation
    257347          else
     
    301391      var h = cfrac.h[rand.Next(cfrac.h.Length)];
    302392
    303       /* modify the coefficient value*/
     393      /* modify the coefficient value */
    304394      if (h.vars[vIdx]) {  // CHECK: paper uses varAt()
    305395        h.coef[vIdx] = 0.0;
     
    311401    }
    312402
    313     private static double Evaluate(ContinuedFraction cfrac, double[,] trainingData) {
     403    private static double Evaluate(ContinuedFraction cfrac, double[,] trainingData, double delta) {
    314404      var dataPoint = new double[trainingData.GetLength(1) - 1];
    315405      var yIdx = trainingData.GetLength(1) - 1;
     
    324414        sum += res * res;
    325415      }
    326       var delta = 0.1;
    327416      return sum / trainingData.GetLength(0) * (1 + delta * cfrac.vars.Count(vi => vi));
    328417    }
     
    342431    }
    343432
     433
     434    private Func<bool[], bool[], bool[]> SelectRandomOp(IRandom rand) {
     435      bool[] union(bool[] a, bool[] b) {
     436        var res = new bool[a.Length];
     437        for (int i = 0; i < a.Length; i++) res[i] = a[i] || b[i];
     438        return res;
     439      }
     440      bool[] intersect(bool[] a, bool[] b) {
     441        var res = new bool[a.Length];
     442        for (int i = 0; i < a.Length; i++) res[i] = a[i] && b[i];
     443        return res;
     444      }
     445      bool[] symmetricDifference(bool[] a, bool[] b) {
     446        var res = new bool[a.Length];
     447        for (int i = 0; i < a.Length; i++) res[i] = a[i] ^ b[i];
     448        return res;
     449      }
     450      switch (rand.Next(3)) {
     451        case 0: return union;
     452        case 1: return intersect;
     453        case 2: return symmetricDifference;
     454        default: throw new ArgumentException();
     455      }
     456    }
     457
    344458    private static double dot(bool[] filter, double[] x, double[] y) {
    345459      var s = 0.0;
     
    351465
    352466
    353     private static void LocalSearchSimplex(ContinuedFraction ch, ref double quality, double[,] trainingData, IRandom rand) {
     467    private static void LocalSearchSimplex(int iterations, int restarts, double tolerance, double delta, ContinuedFraction ch, ref double quality, double[,] trainingData, IRandom rand) {
    354468      double uniformPeturbation = 1.0;
    355       double tolerance = 1e-3;
    356       int maxEvals = 250;
    357       int numSearches = 4;
     469      int maxEvals = iterations;
     470      int numSearches = restarts + 1;
    358471      var numRows = trainingData.GetLength(0);
    359472      int numSelectedRows = numRows / 5; // 20% of the training samples
    360473
    361       quality = Evaluate(ch, trainingData); // get quality with original coefficients
     474      quality = Evaluate(ch, trainingData, delta); // get quality with original coefficients
    362475
    363476      double[] origCoeff = ExtractCoeff(ch);
     
    371484      double objFunc(double[] curCoeff) {
    372485        SetCoeff(ch, curCoeff);
    373         return Evaluate(ch, fittingData);
     486        return Evaluate(ch, fittingData, delta);
    374487      }
    375488
     
    386499        SetCoeff(ch, optimizedCoeff);
    387500
    388         var newQuality = Evaluate(ch, trainingData);
     501        var newQuality = Evaluate(ch, trainingData, delta);
    389502
    390503        if (newQuality < bestQuality) {
     
    434547    }
    435548
     549    #region build a symbolic expression tree
    436550    Symbol addSy = new Addition();
    437     Symbol mulSy = new Multiplication();
    438551    Symbol divSy = new Division();
    439552    Symbol startSy = new StartSymbol();
     
    442555    Symbol varSy = new Problems.DataAnalysis.Symbolic.Variable();
    443556
    444     private ISymbolicRegressionSolution CreateSymbolicRegressionSolution(ContinuedFraction cfrac, IRegressionProblemData problemData) {
    445       var variables = problemData.AllowedInputVariables.ToArray();
     557    private ISymbolicRegressionSolution CreateSymbolicRegressionSolution(ContinuedFraction cfrac, double[,] trainingData, string[] variables, string targetVariable) {
    446558      ISymbolicExpressionTreeNode res = null;
    447559      for (int i = cfrac.h.Length - 1; i > 1; i -= 2) {
     
    469581      start.AddSubtree(h0Term);
    470582
    471       var model = new SymbolicRegressionModel(problemData.TargetVariable, new SymbolicExpressionTree(progRoot), new SymbolicDataAnalysisExpressionTreeBatchInterpreter());
    472       var sol = new SymbolicRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
     583      var model = new SymbolicRegressionModel(targetVariable, new SymbolicExpressionTree(progRoot), new SymbolicDataAnalysisExpressionTreeBatchInterpreter());
     584      var ds = new Dataset(variables.Concat(new[] { targetVariable }), trainingData);
     585      var problemData = new RegressionProblemData(ds, variables, targetVariable);
     586      var sol = new SymbolicRegressionSolution(model, problemData);
    473587      return sol;
    474588    }
     
    494608    }
    495609  }
    496 
    497   public class Agent {
    498     public ContinuedFraction pocket;
    499     public double pocketObjValue;
    500     public ContinuedFraction current;
    501     public double currentObjValue;
    502 
    503     public IList<Agent> children = new List<Agent>();
    504 
    505     public IEnumerable<Agent> IterateLevels() {
    506       var agents = new List<Agent>() { this };
    507       IterateLevelsRec(this, agents);
    508       return agents;
    509     }
    510     public IEnumerable<Agent> IteratePostOrder() {
    511       var agents = new List<Agent>();
    512       IteratePostOrderRec(this, agents);
    513       return agents;
    514     }
    515 
    516     internal void MaintainInvariant() {
    517       foreach (var child in children) {
    518         MaintainInvariant(parent: this, child);
    519       }
    520       if (currentObjValue < pocketObjValue) {
    521         Swap(ref pocket, ref current);
    522         Swap(ref pocketObjValue, ref currentObjValue);
    523       }
    524     }
    525 
    526 
    527     private static void MaintainInvariant(Agent parent, Agent child) {
    528       if (child.pocketObjValue < parent.pocketObjValue) {
    529         Swap(ref child.pocket, ref parent.pocket);
    530         Swap(ref child.pocketObjValue, ref parent.pocketObjValue);
    531       }
    532     }
    533 
    534     private void IterateLevelsRec(Agent agent, List<Agent> agents) {
    535       foreach (var child in agent.children) {
    536         agents.Add(child);
    537       }
    538       foreach (var child in agent.children) {
    539         IterateLevelsRec(child, agents);
    540       }
    541     }
    542 
    543     private void IteratePostOrderRec(Agent agent, List<Agent> agents) {
    544       foreach (var child in agent.children) {
    545         IteratePostOrderRec(child, agents);
    546       }
    547       agents.Add(agent);
    548     }
    549 
    550 
    551     private static void Swap<T>(ref T a, ref T b) {
    552       var temp = a;
    553       a = b;
    554       b = temp;
    555     }
    556   }
     610  #endregion
    557611}
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/ContinuedFractionRegression/ContinuedFraction.cs

    r17971 r17984  
    11using HeuristicLab.Core;
    2 using HeuristicLab.Problems.DataAnalysis;
    32
    43namespace HeuristicLab.Algorithms.DataAnalysis.ContinuedFractionRegression {
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/ContinuedFractionRegression/NelderMeadSimplex.cs

    r17848 r17984  
    11using System;
    2 using System.Collections.Generic;
    3 using System.Linq;
    4 using System.Text;
    5 using System.Threading.Tasks;
    62
    73namespace HeuristicLab.Algorithms.DataAnalysis.ContinuedFractionRegression {
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/ContinuedFractionRegression/Transformation.cs

    r17848 r17984  
    11using System;
    2 using System.Collections.Generic;
    3 using System.Linq;
    4 using System.Text;
    5 using System.Threading.Tasks;
    62using HeuristicLab.Data;
    73
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/ContinuedFractionRegression/Vector.cs

    r17848 r17984  
    11using System;
    2 using System.Collections.Generic;
    3 using System.Linq;
    4 using System.Text;
    5 using System.Threading.Tasks;
    62
    73namespace HeuristicLab.Algorithms.DataAnalysis.ContinuedFractionRegression {
  • branches/3106_AnalyticContinuedFractionsRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r17971 r17984  
    136136    <Compile Include="BaselineClassifiers\OneRClassificationSolution.cs" />
    137137    <Compile Include="BaselineClassifiers\ZeroR.cs" />
     138    <Compile Include="ContinuedFractionRegression\Agent.cs" />
    138139    <Compile Include="ContinuedFractionRegression\Algorithm.cs" />
    139140    <Compile Include="ContinuedFractionRegression\ContinuedFraction.cs" />
Note: See TracChangeset for help on using the changeset viewer.