
Changeset 17726


Timestamp: 08/26/20 16:43:25 (4 years ago)
Author: pfleck
Message: #3040 Added a constant optimization evaluator for vectors that uses the existing AutoDiff library by unrolling all vector operations.
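
For orientation, a minimal sketch of the unrolling idea behind this changeset (illustrative only, not code from the revision): each element of a vector variable gets its own scalar AutoDiff variable, so a vector aggregation such as Sum collapses to a plain scalar term that the existing AutoDiff library can differentiate. Only Variable and TermBuilder.Sum, which the changeset itself uses, are assumed here.

    using System.Linq;
    using AutoDiff;

    static class UnrollingSketch {
      // A length-n vector variable is represented by one scalar AutoDiff variable per element.
      public static Term SumOfVector(int vectorLength, out Variable[] elements) {
        elements = Enumerable.Range(0, vectorLength).Select(_ => new Variable()).ToArray();
        // Sum(v) over the unrolled elements is an ordinary scalar term,
        // so the existing scalar constant optimizer can differentiate it as usual.
        return TermBuilder.Sum(elements);
      }
    }

In the actual converter below, the same idea is applied recursively: every tree node returns a list of terms, one per vector element, and scalar nodes return a single-element list.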

Location: branches/3040_VectorBasedGP
Files: 7 edited, 3 copied

  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.csproj

    r17502 r17726  
    141141    <Compile Include="Plugin.cs" />
    142142    <Compile Include="SingleObjective\ConstantOptimizationAnalyzer.cs" />
     143    <Compile Include="SingleObjective\Evaluators\VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator.cs" />
    143144    <Compile Include="SingleObjective\Evaluators\NonlinearLeastSquaresConstantOptimizationEvaluator.cs" />
    144145    <Compile Include="SingleObjective\Evaluators\SymbolicRegressionMeanRelativeErrorEvaluator.cs" />
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator.cs

    r17725 r17726  
    3030using HeuristicLab.Parameters;
    3131using HEAL.Attic;
     32using DoubleVector = MathNet.Numerics.LinearAlgebra.Vector<double>;
    3233
    3334namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
    34   [StorableType("24B68851-036D-4446-BD6F-3823E9028FF4")]
    35   [Item("NonlinearLeastSquaresOptimizer", "")]
    36   public class NonlinearLeastSquaresConstantOptimizationEvaluator : SymbolicRegressionConstantOptimizationEvaluator {
     35  [StorableType("5F8DB251-C6F7-40AC-BC30-3C55AF655A2F")]
     36  [Item("VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator", "")]
     37  public class VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator : SymbolicRegressionConstantOptimizationEvaluator {
    3738
    3839    private const string ConstantOptimizationIterationsName = "ConstantOptimizationIterations";
     
    5152    #endregion
    5253
    53     public NonlinearLeastSquaresConstantOptimizationEvaluator()
     54    public VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator()
    5455      : base() {
    5556      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree(0 indicates other or default stopping criterion).", new IntValue(10)));
    5657    }
    5758
    58     protected NonlinearLeastSquaresConstantOptimizationEvaluator(NonlinearLeastSquaresConstantOptimizationEvaluator original, Cloner cloner)
     59    protected VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator(VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator original, Cloner cloner)
    5960      : base(original, cloner) {
    6061    }
    6162    public override IDeepCloneable Clone(Cloner cloner) {
    62       return new NonlinearLeastSquaresConstantOptimizationEvaluator(this, cloner);
     63      return new VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator(this, cloner);
    6364    }
    6465    [StorableConstructor]
    65     protected NonlinearLeastSquaresConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
     66    protected VectorUnrollingNonlinearLeastSquaresConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
    6667
    6768    protected override ISymbolicExpressionTree OptimizeConstants(
    6869      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows,
    6970      CancellationToken cancellationToken = default(CancellationToken), EvaluationsCounter counter = null) {
    70       return OptimizeTree(tree,
     71      return OptimizeTree(tree, (SymbolicDataAnalysisExpressionTreeVectorInterpreter)SymbolicDataAnalysisTreeInterpreterParameter.ActualValue,
    7172        problemData, rows,
    7273        ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations, UpdateVariableWeights,
     
    7677    public static ISymbolicExpressionTree OptimizeTree(
    7778      ISymbolicExpressionTree tree,
     79      SymbolicDataAnalysisExpressionTreeVectorInterpreter interpreter,
    7880      IRegressionProblemData problemData, IEnumerable<int> rows,
    7981      bool applyLinearScaling, int maxIterations, bool updateVariableWeights,
    8082      CancellationToken cancellationToken = default(CancellationToken), EvaluationsCounter counter = null, Action<double[], double, object> iterationCallback = null) {
     83
     84      var vectorLengths = problemData.Dataset.DoubleVectorVariables
     85        .SelectMany(var => problemData.Dataset.GetDoubleVectorValues(var, rows))
     86        .Select(v => v.Count);
     87      var vectorlength = vectorLengths.First();
     88      if (vectorLengths.Any(l => l != vectorlength))
     89        throw new InvalidOperationException("All vectors must be of same length.");
     90      var evaluationTraces = interpreter.GetIntermediateNodeValues(tree, problemData.Dataset, rows);
     91      var evaluationTrace = evaluationTraces.First(); // assume all vector lengths are the same
     92
    8193
    8294      // numeric constants in the tree become variables for constant opt
     
    8597      // variable name, variable value (for factor vars) and lag as a DataForVariable object.
    8698      // A dictionary is used to find parameters
    87       bool success = TreeToAutoDiffTermConverter.TryConvertToAutoDiff(
    88         tree, updateVariableWeights, applyLinearScaling,
     99      bool success = VectorUnrollingTreeToAutoDiffTermConverter.TryConvertToAutoDiff(
     100        tree, evaluationTrace,
     101        updateVariableWeights, applyLinearScaling,
    89102        out var parameters, out var initialConstants, out var func, out var func_grad);
    90103      if (!success)
     
    114127          } else if (ds.VariableHasType<string>(info.variableName)) {
    115128            x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
     129          } else if (ds.VariableHasType<DoubleVector>(info.variableName)) {
     130            x[row, col] = ds.GetDoubleVectorValue(info.variableName, r)[info.index];
    116131          } else throw new InvalidProgramException("found a variable of unknown type");
    117132          col++;
     
    182197    }
    183198
    184     private static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermConverter.ParametricFunction func) {
     199    private static alglib.ndimensional_pfunc CreatePFunc(VectorUnrollingTreeToAutoDiffTermConverter.ParametricFunction func) {
    185200      return (double[] c, double[] x, ref double fx, object o) => {
    186201        fx = func(c, x);
     
    190205    }
    191206
    192     private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
     207    private static alglib.ndimensional_pgrad CreatePGrad(VectorUnrollingTreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
    193208      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
    194209        var tuple = func_grad(c, x);
     
    201216
    202217    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
    203       return TreeToAutoDiffTermConverter.IsCompatible(tree);
     218      return VectorUnrollingTreeToAutoDiffTermConverter.IsCompatible(tree);
    204219    }
    205220  }
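
As a reading aid for the hunk above: each vector element gets its own column in the optimizer's parameter matrix, addressed by the element index that this changeset adds to DataForVariable. A rough sketch of that flattening follows; BuildParameterMatrix and its signature are hypothetical, and only the GetDoubleVectorValue/GetDoubleValue calls mirror the changeset.

    using System.Collections.Generic;

    // Sketch only: how vector data ends up in alglib's flat parameter matrix.
    // DataForVariable and IDataset are assumed to be the converter's nested entry type
    // and HeuristicLab's dataset interface; this helper itself is not part of the changeset.
    static class FlatteningSketch {
      public static double[,] BuildParameterMatrix(IList<DataForVariable> infos, IDataset ds, IList<int> rows) {
        var x = new double[rows.Count, infos.Count];
        for (int row = 0; row < rows.Count; row++) {
          int r = rows[row];
          int col = 0;
          foreach (var info in infos) {
            x[row, col] = info.index >= 0
              ? ds.GetDoubleVectorValue(info.variableName, r)[info.index] // one column per vector element
              : ds.GetDoubleValue(info.variableName, r);                  // scalar variable, as before
            col++;
          }
        }
        return x;
      }
    }

The evaluator also requires all vector values to share one length, which is why the evaluation trace of the first row is taken as representative when building the unrolled terms.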
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Converters/VectorUnrollingTreeToAutoDiffTermConverter.cs

    r17725 r17726  
    2525using System.Runtime.Serialization;
    2626using AutoDiff;
     27using HeuristicLab.Common;
    2728using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2829
    2930namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
    30   public class TreeToAutoDiffTermConverter {
     31  public class VectorUnrollingTreeToAutoDiffTermConverter {
    3132    public delegate double ParametricFunction(double[] vars, double[] @params);
    3233
     
    3839      public readonly string variableValue; // for factor vars
    3940      public readonly int lag;
    40 
    41       public DataForVariable(string varName, string varValue, int lag) {
     41      public readonly int index; // for vectors
     42
     43      public DataForVariable(string varName, string varValue, int lag, int index) {
    4244        this.variableName = varName;
    4345        this.variableValue = varValue;
    4446        this.lag = lag;
     47        this.index = index;
    4548      }
    4649
     
    5053        return other.variableName.Equals(this.variableName) &&
    5154               other.variableValue.Equals(this.variableValue) &&
    52                other.lag == this.lag;
     55               other.lag == this.lag &&
     56               other.index == this.index;
    5357      }
    5458
    5559      public override int GetHashCode() {
    56         return variableName.GetHashCode() ^ variableValue.GetHashCode() ^ lag;
     60        return variableName.GetHashCode() ^ variableValue.GetHashCode() ^ lag ^ index;
    5761      }
    5862    }
     
    101105    #endregion
    102106
    103     public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable, bool addLinearScalingTerms,
     107    public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree,
     108      IDictionary<ISymbolicExpressionTreeNode, SymbolicDataAnalysisExpressionTreeVectorInterpreter.EvaluationResult> evaluationTrace,
     109      bool makeVariableWeightsVariable, bool addLinearScalingTerms,
    104110      out List<DataForVariable> parameters, out double[] initialConstants,
    105111      out ParametricFunction func,
     
    107113
    108114      // use a transformator object which holds the state (variable list, parameter list, ...) for recursive transformation of the tree
    109       var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable, addLinearScalingTerms);
    110       AutoDiff.Term term;
     115      var transformator = new VectorUnrollingTreeToAutoDiffTermConverter(evaluationTrace,
     116        makeVariableWeightsVariable, addLinearScalingTerms);
     117      Term term;
    111118      try {
    112         term = transformator.ConvertToAutoDiff(tree.Root.GetSubtree(0));
     119        term = transformator.ConvertToAutoDiff(tree.Root.GetSubtree(0)).Single();
    113120        var parameterEntries = transformator.parameters.ToArray(); // guarantee same order for keys and values
    114121        var compiledTerm = term.Compile(transformator.variables.ToArray(),
     
    128135    }
    129136
     137    private readonly IDictionary<ISymbolicExpressionTreeNode, SymbolicDataAnalysisExpressionTreeVectorInterpreter.EvaluationResult> evaluationTrace;
    130138    // state for recursive transformation of trees
    131     private readonly
    132     List<double> initialConstants;
     139    private readonly List<double> initialConstants;
    133140    private readonly Dictionary<DataForVariable, AutoDiff.Variable> parameters;
    134141    private readonly List<AutoDiff.Variable> variables;
     
    136143    private readonly bool addLinearScalingTerms;
    137144
    138     private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable, bool addLinearScalingTerms) {
     145    private VectorUnrollingTreeToAutoDiffTermConverter(IDictionary<ISymbolicExpressionTreeNode, SymbolicDataAnalysisExpressionTreeVectorInterpreter.EvaluationResult> evaluationTrace,
     146      bool makeVariableWeightsVariable, bool addLinearScalingTerms) {
     147      this.evaluationTrace = evaluationTrace;
    139148      this.makeVariableWeightsVariable = makeVariableWeightsVariable;
    140149      this.addLinearScalingTerms = addLinearScalingTerms;
     
    144153    }
    145154
    146     private AutoDiff.Term ConvertToAutoDiff(ISymbolicExpressionTreeNode node) {
    147       if (node.Symbol is Constant) {
     155    private IList<AutoDiff.Term> ConvertToAutoDiff(ISymbolicExpressionTreeNode node) {
     156      IList<Term> BinaryOp(Func<Term, Term, Term> binaryOp, Func<Term, Term> singleElementOp, params IList<Term>[] terms) {
     157        if (terms.Length == 1) return terms[0].Select(singleElementOp).ToList();
     158        return terms.Aggregate((acc, vectorizedTerm) => acc.Zip(vectorizedTerm, binaryOp).ToList());
     159      }
     160      IList<Term> BinaryOp2(Func<Term, Term, Term> binaryOp, params IList<Term>[] terms) {
     161        return terms.Aggregate((acc, vectorizedTerm) => acc.Zip(vectorizedTerm, binaryOp).ToList());
     162      }
     163      IList<Term> UnaryOp(Func<Term, Term> unaryOp, IList<Term> term) {
     164        return term.Select(unaryOp).ToList();
     165      }
     166
     167      var evaluationResult = evaluationTrace[node];
     168
     169      if (node.Symbol is Constant) { // assume scalar constant
    148170        initialConstants.Add(((ConstantTreeNode)node).Value);
    149171        var var = new AutoDiff.Variable();
    150172        variables.Add(var);
    151         return var;
     173        return new Term[] { var };
    152174      }
    153175      if (node.Symbol is Variable || node.Symbol is BinaryFactorVariable) {
     
    156178        // factor variable values are only 0 or 1 and set in x accordingly
    157179        var varValue = factorVarNode != null ? factorVarNode.VariableValue : string.Empty;
    158         var par = FindOrCreateParameter(parameters, varNode.VariableName, varValue);
     180        var pars = evaluationResult.IsVector
     181          ? Enumerable.Range(0, evaluationResult.Vector.Count).Select(i => FindOrCreateParameter(parameters, varNode.VariableName, varValue, index: i))
     182          : FindOrCreateParameter(parameters, varNode.VariableName, varValue).ToEnumerable();
    159183
    160184        if (makeVariableWeightsVariable) {
     
    162186          var w = new AutoDiff.Variable();
    163187          variables.Add(w);
    164           return AutoDiff.TermBuilder.Product(w, par);
     188          return pars.Select(par => AutoDiff.TermBuilder.Product(w, par)).ToList();
    165189        } else {
    166           return varNode.Weight * par;
     190          return pars.Select(par => varNode.Weight * par).ToList();
    167191        }
    168192      }
     
    179203          products.Add(AutoDiff.TermBuilder.Product(wVar, par));
    180204        }
    181         return AutoDiff.TermBuilder.Sum(products);
    182       }
    183       if (node.Symbol is LaggedVariable) {
    184         var varNode = node as LaggedVariableTreeNode;
    185         var par = FindOrCreateParameter(parameters, varNode.VariableName, string.Empty, varNode.Lag);
    186 
    187         if (makeVariableWeightsVariable) {
    188           initialConstants.Add(varNode.Weight);
    189           var w = new AutoDiff.Variable();
    190           variables.Add(w);
    191           return AutoDiff.TermBuilder.Product(w, par);
    192         } else {
    193           return varNode.Weight * par;
    194         }
    195       }
     205        return new[] { AutoDiff.TermBuilder.Sum(products) };
     206      }
     207      //if (node.Symbol is LaggedVariable) {
     208      //  var varNode = node as LaggedVariableTreeNode;
     209      //  var par = FindOrCreateParameter(parameters, varNode.VariableName, string.Empty, varNode.Lag);
     210
     211      //  if (makeVariableWeightsVariable) {
     212      //    initialConstants.Add(varNode.Weight);
     213      //    var w = new AutoDiff.Variable();
     214      //    variables.Add(w);
     215      //    return AutoDiff.TermBuilder.Product(w, par);
     216      //  } else {
     217      //    return varNode.Weight * par;
     218      //  }
     219      //}
    196220      if (node.Symbol is Addition) {
    197         List<AutoDiff.Term> terms = new List<Term>();
    198         foreach (var subTree in node.Subtrees) {
    199           terms.Add(ConvertToAutoDiff(subTree));
    200         }
    201         return AutoDiff.TermBuilder.Sum(terms);
     221        var terms = node.Subtrees.Select(ConvertToAutoDiff).ToArray();
     222        return BinaryOp((a, b) => a + b, a => a, terms);
    202223      }
    203224      if (node.Symbol is Subtraction) {
    204         List<AutoDiff.Term> terms = new List<Term>();
    205         for (int i = 0; i < node.SubtreeCount; i++) {
    206           AutoDiff.Term t = ConvertToAutoDiff(node.GetSubtree(i));
    207           if (i > 0) t = -t;
    208           terms.Add(t);
    209         }
    210         if (terms.Count == 1) return -terms[0];
    211         else return AutoDiff.TermBuilder.Sum(terms);
     225        var terms = node.Subtrees.Select(ConvertToAutoDiff).ToArray();
     226        return BinaryOp((a, b) => a - b, a => -a, terms);
    212227      }
    213228      if (node.Symbol is Multiplication) {
    214         List<AutoDiff.Term> terms = new List<Term>();
    215         foreach (var subTree in node.Subtrees) {
    216           terms.Add(ConvertToAutoDiff(subTree));
    217         }
    218         if (terms.Count == 1) return terms[0];
    219         else return terms.Aggregate((a, b) => new AutoDiff.Product(a, b));
     229        var terms = node.Subtrees.Select(ConvertToAutoDiff).ToArray();
     230        return BinaryOp((a, b) => a * b, a => a, terms);
    220231      }
    221232      if (node.Symbol is Division) {
    222         List<AutoDiff.Term> terms = new List<Term>();
    223         foreach (var subTree in node.Subtrees) {
    224           terms.Add(ConvertToAutoDiff(subTree));
    225         }
    226         if (terms.Count == 1) return 1.0 / terms[0];
    227         else return terms.Aggregate((a, b) => new AutoDiff.Product(a, 1.0 / b));
     233        var terms = node.Subtrees.Select(ConvertToAutoDiff).ToArray();
     234        return BinaryOp((a, b) => a / b, a => 1.0 / a, terms);
    228235      }
    229236      if (node.Symbol is Absolute) {
    230         var x1 = ConvertToAutoDiff(node.GetSubtree(0));
    231         return abs(x1);
    232       }
    233       if (node.Symbol is AnalyticQuotient) {
    234         var x1 = ConvertToAutoDiff(node.GetSubtree(0));
    235         var x2 = ConvertToAutoDiff(node.GetSubtree(1));
    236         return x1 / (TermBuilder.Power(1 + x2 * x2, 0.5));
    237       }
     237        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     238        return UnaryOp(abs, term);
     239      }
     240      //if (node.Symbol is AnalyticQuotient) {
     241      //  var x1 = ConvertToAutoDiff(node.GetSubtree(0));
     242      //  var x2 = ConvertToAutoDiff(node.GetSubtree(1));
     243      //  return x1 / (TermBuilder.Power(1 + x2 * x2, 0.5));
     244      //}
    238245      if (node.Symbol is Logarithm) {
    239         return AutoDiff.TermBuilder.Log(
    240           ConvertToAutoDiff(node.GetSubtree(0)));
     246        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     247        return UnaryOp(TermBuilder.Log, term);
    241248      }
    242249      if (node.Symbol is Exponential) {
    243         return AutoDiff.TermBuilder.Exp(
    244           ConvertToAutoDiff(node.GetSubtree(0)));
     250        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     251        return UnaryOp(TermBuilder.Exp, term);
    245252      }
    246253      if (node.Symbol is Square) {
    247         return AutoDiff.TermBuilder.Power(
    248           ConvertToAutoDiff(node.GetSubtree(0)), 2.0);
     254        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     255        return UnaryOp(t => TermBuilder.Power(t, 2.0), term);
    249256      }
    250257      if (node.Symbol is SquareRoot) {
    251         return AutoDiff.TermBuilder.Power(
    252           ConvertToAutoDiff(node.GetSubtree(0)), 0.5);
     258        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     259        return UnaryOp(t => TermBuilder.Power(t, 0.5), term);
    253260      }
    254261      if (node.Symbol is Cube) {
    255         return AutoDiff.TermBuilder.Power(
    256           ConvertToAutoDiff(node.GetSubtree(0)), 3.0);
     262        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     263        return UnaryOp(t => TermBuilder.Power(t, 3.0), term);
    257264      }
    258265      if (node.Symbol is CubeRoot) {
    259         return cbrt(ConvertToAutoDiff(node.GetSubtree(0)));
     266        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     267        return UnaryOp(cbrt, term);
    260268      }
    261269      if (node.Symbol is Sine) {
    262         return sin(
    263           ConvertToAutoDiff(node.GetSubtree(0)));
     270        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     271        return UnaryOp(sin, term);
    264272      }
    265273      if (node.Symbol is Cosine) {
    266         return cos(
    267           ConvertToAutoDiff(node.GetSubtree(0)));
     274        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     275        return UnaryOp(cos, term);
    268276      }
    269277      if (node.Symbol is Tangent) {
    270         return tan(
    271           ConvertToAutoDiff(node.GetSubtree(0)));
     278        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     279        return UnaryOp(tan, term);
    272280      }
    273281      if (node.Symbol is HyperbolicTangent) {
    274         return tanh(
    275           ConvertToAutoDiff(node.GetSubtree(0)));
     282        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     283        return UnaryOp(tanh, term);
    276284      }
    277285      if (node.Symbol is Erf) {
    278         return erf(
    279           ConvertToAutoDiff(node.GetSubtree(0)));
     286        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     287        return UnaryOp(erf, term);
    280288      }
    281289      if (node.Symbol is Norm) {
    282         return norm(
    283           ConvertToAutoDiff(node.GetSubtree(0)));
     290        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     291        return UnaryOp(norm, term);
    284292      }
    285293      if (node.Symbol is StartSymbol) {
     
    291299          variables.Add(alpha);
    292300          var t = ConvertToAutoDiff(node.GetSubtree(0));
    293           return t * alpha + beta;
     301          if (t.Count > 1) throw new InvalidOperationException("Tree Result must be scalar value");
     302          return new[] { t[0] * alpha + beta };
    294303        } else return ConvertToAutoDiff(node.GetSubtree(0));
    295304      }
     305      if (node.Symbol is Sum) {
     306        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     307        return new[] { TermBuilder.Sum(term) };
     308      }
     309      if (node.Symbol is Mean) {
     310        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     311        return new[] { TermBuilder.Sum(term) / term.Count };
     312      }
     313      if (node.Symbol is StandardDeviation) {
     314        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     315        var mean = TermBuilder.Sum(term) / term.Count;
     316        var ssd = TermBuilder.Sum(term.Select(t => TermBuilder.Power(t - mean, 2.0)));
     317        return new[] { TermBuilder.Power(ssd / term.Count, 0.5) };
     318      }
     319      if (node.Symbol is Length) {
     320        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     321        return new[] { TermBuilder.Constant(term.Count) };
     322      }
     323      //if (node.Symbol is Min) {
     324      //}
     325      //if (node.Symbol is Max) {
     326      //}
     327      if (node.Symbol is Variance) {
     328        var term = node.Subtrees.Select(ConvertToAutoDiff).Single();
     329        var mean = TermBuilder.Sum(term) / term.Count;
     330        var ssd = TermBuilder.Sum(term.Select(t => TermBuilder.Power(t - mean, 2.0)));
     331        return new[] { ssd / term.Count };
     332      }
     333      //if (node.Symbol is Skewness) {
     334      //}
     335      //if (node.Symbol is Kurtosis) {
     336      //}
     337      //if (node.Symbol is EuclideanDistance) {
     338      //}
     339      //if (node.Symbol is Covariance) {
     340      //}
     341
     342
    296343      throw new ConversionException();
    297344    }
     
    301348    // each binary indicator is only necessary once. So we only create a parameter if this combination is not yet available
    302349    private static Term FindOrCreateParameter(Dictionary<DataForVariable, AutoDiff.Variable> parameters,
    303       string varName, string varValue = "", int lag = 0) {
    304       var data = new DataForVariable(varName, varValue, lag);
     350      string varName, string varValue = "", int lag = 0, int index = -1) {
     351      var data = new DataForVariable(varName, varValue, lag, index);
    305352
    306353      AutoDiff.Variable par = null;
     
    319366          !(n.Symbol is Variable) &&
    320367          !(n.Symbol is BinaryFactorVariable) &&
    321           !(n.Symbol is FactorVariable) &&
    322           !(n.Symbol is LaggedVariable) &&
     368          //!(n.Symbol is FactorVariable) &&
     369          //!(n.Symbol is LaggedVariable) &&
    323370          !(n.Symbol is Constant) &&
    324371          !(n.Symbol is Addition) &&
     
    338385          !(n.Symbol is StartSymbol) &&
    339386          !(n.Symbol is Absolute) &&
    340           !(n.Symbol is AnalyticQuotient) &&
     387          //!(n.Symbol is AnalyticQuotient) &&
    341388          !(n.Symbol is Cube) &&
    342           !(n.Symbol is CubeRoot)
     389          !(n.Symbol is CubeRoot) &&
     390          !(n.Symbol is Sum) &&
     391          !(n.Symbol is Mean) &&
     392          !(n.Symbol is StandardDeviation) &&
     393          !(n.Symbol is Length) &&
     394          //!(n.Symbol is Min) &&
     395          //!(n.Symbol is Max) &&
     396          !(n.Symbol is Variance)
     397        //!(n.Symbol is Skewness) &&
     398        //!(n.Symbol is Kurtosis) &&
     399        //!(n.Symbol is EuclideanDistance) &&
     400        //!(n.Symbol is Covariance)
    343401        select n).Any();
    344402      return !containsUnknownSymbol;
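
The BinaryOp/UnaryOp local functions above give the unrolled term lists their vector semantics by zipping or mapping scalar AutoDiff operations element-wise. A stripped-down standalone version of the same pattern (class and method names are illustrative):

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using AutoDiff;

    static class ElementwiseTermOps {
      // Combine two unrolled operands element by element (assumes equal lengths).
      public static IList<Term> Zip(Func<Term, Term, Term> op, IList<Term> left, IList<Term> right) {
        return left.Zip(right, op).ToList();
      }
      // Apply a scalar operation to every element of one unrolled operand.
      public static IList<Term> Map(Func<Term, Term> op, IList<Term> operand) {
        return operand.Select(op).ToList();
      }
    }

    // usage, mirroring the converter: Addition zips with (a, b) => a + b,
    // Logarithm maps with TermBuilder.Log over the element terms.

Subtraction and Division reuse the same helper with a single-operand fallback (negation and reciprocal, respectively), which is why BinaryOp in the converter also takes a singleElementOp.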
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.csproj

    r17593 r17726  
    161161    </Compile>
    162162    <Compile Include="Converters\LinearModelToTreeConverter.cs" />
     163    <Compile Include="Converters\VectorUnrollingTreeToAutoDiffTermConverter.cs" />
    163164    <Compile Include="Converters\VectorTreeSimplifier.cs" />
    164165    <Compile Include="Converters\TreeSimplifier.cs" />
     
    253254    <Compile Include="Symbols\Kurtosis.cs" />
    254255    <Compile Include="Symbols\Skewness.cs" />
     256    <Compile Include="Symbols\SubVector.cs" />
    255257    <Compile Include="Symbols\WindowedSymbolTreeNode.cs" />
    256258    <Compile Include="Symbols\WindowedSymbol.cs" />
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interfaces/IWindowedSymbol.cs

    r17573 r17726  
    2424
    2525namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
    26   [StorableType("BB96C8EF-4DB8-4892-B3A3-358F34C06AD6")]
    2726  /// <summary>
    2827  /// Any symbol that can use a window (e.g. partial aggregation of a vector)
    2928  /// </summary>
     29  [StorableType("BB96C8EF-4DB8-4892-B3A3-358F34C06AD6")]
    3030  public interface IWindowedSymbol : ISymbol {
    3131  }
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/OpCodes.cs

    r17554 r17726  
    8787    Skewness = 60,
    8888    Kurtosis = 61,
    89     EuclideanDistance = 62 ,
     89    EuclideanDistance = 62,
    9090    Covariance = 63,
     91    SubVector = 64,
    9192
    9293  }
     
    156157    public const byte EuclideanDistance = (byte)OpCode.EuclideanDistance;
    157158    public const byte Covariance = (byte)OpCode.Covariance;
     159    public const byte SubVector = (byte)OpCode.SubVector;
    158160
    159161
     
    223225      { typeof(EuclideanDistance), OpCodes.EuclideanDistance },
    224226      { typeof(Covariance), OpCodes.Covariance },
     227      { typeof(SubVector), OpCodes.SubVector },
    225228    };
    226229
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/SymbolicDataAnalysisExpressionTreeVectorInterpreter.cs

    r17721 r17726  
    253253        } else
    254254          yield return double.NaN;
     255        state.Reset();
     256      }
     257    }
     258
     259    public IEnumerable<Dictionary<ISymbolicExpressionTreeNode, EvaluationResult>> GetIntermediateNodeValues(ISymbolicExpressionTree tree, IDataset dataset, IEnumerable<int> rows) {
     260      var state = PrepareInterpreterState(tree, dataset);
     261
     262      foreach (var rowEnum in rows) {
     263        int row = rowEnum;
     264        var traceDict = new Dictionary<ISymbolicExpressionTreeNode, EvaluationResult>();
     265        var result = Evaluate(dataset, ref row, state, traceDict);
     266        traceDict.Add(tree.Root.GetSubtree(0), result); // Add StartSymbol
     267        yield return traceDict;
    255268        state.Reset();
    256269      }
     
    353366    }
    354367
    355     private static EvaluationResult AggregateApply(EvaluationResult val, WindowedSymbolTreeNode node,
     368    private static EvaluationResult WindowedAggregateApply(EvaluationResult val, WindowedSymbolTreeNode node,
    356369      Func<double, double> sFunc = null,
    357370      Func<DoubleVector, double> vFunc = null) {
     
    370383      return EvaluationResult.NaN;
    371384    }
     385    private static EvaluationResult WindowedFunctionApply(EvaluationResult val, IWindowedSymbolTreeNode node,
     386      Func<double, double> sFunc = null,
     387      Func<DoubleVector, DoubleVector> vFunc = null) {
     388      var offset = node.Offset;
     389      var length = node.Length;
     390
     391      DoubleVector SubVector(DoubleVector v) {
     392        int index = (int)(offset * v.Count);
     393        int count = (int)(length * (v.Count - index));
     394        return v.SubVector(index, count);
     395      };
     396
     397      if (val.IsScalar && sFunc != null) return new EvaluationResult(sFunc(val.Scalar));
     398      if (val.IsVector && vFunc != null) return new EvaluationResult(vFunc(SubVector(val.Vector)));
     399      return EvaluationResult.NaN;
     400    }
     401
    372402    private static EvaluationResult AggregateMultipleApply(EvaluationResult lhs, EvaluationResult rhs,
    373403      Func<DoubleVector, DoubleVector, (DoubleVector, DoubleVector)> lengthStrategy,
     
    404434    }
    405435
    406     public virtual EvaluationResult Evaluate(IDataset dataset, ref int row, InterpreterState state) {
     436
     437    public virtual EvaluationResult Evaluate(IDataset dataset, ref int row, InterpreterState state,
     438      IDictionary<ISymbolicExpressionTreeNode, EvaluationResult> traceDict = null) {
     439
     440      void TraceEvaluation(Instruction instr, EvaluationResult result) {
     441        traceDict?.Add(instr.dynamicNode, result);
     442      }
     443
    407444      Instruction currentInstr = state.NextInstruction();
    408445      switch (currentInstr.opCode) {
    409446        case OpCodes.Add: {
    410             var cur = Evaluate(dataset, ref row, state);
     447            var cur = Evaluate(dataset, ref row, state, traceDict);
    411448            for (int i = 1; i < currentInstr.nArguments; i++) {
    412               var op = Evaluate(dataset, ref row, state);
     449              var op = Evaluate(dataset, ref row, state, traceDict);
    413450              cur = ArithmeticApply(cur, op,
    414451                (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 0.0),
     
    418455                (v1, v2) => v1 + v2);
    419456            }
     457            TraceEvaluation(currentInstr, cur);
    420458            return cur;
    421459          }
    422460        case OpCodes.Sub: {
    423             var cur = Evaluate(dataset, ref row, state);
     461            var cur = Evaluate(dataset, ref row, state, traceDict);
    424462            for (int i = 1; i < currentInstr.nArguments; i++) {
    425               var op = Evaluate(dataset, ref row, state);
     463              var op = Evaluate(dataset, ref row, state, traceDict);
    426464              cur = ArithmeticApply(cur, op,
    427465                (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 0.0),
     
    431469                (v1, v2) => v1 - v2);
    432470            }
     471            TraceEvaluation(currentInstr, cur);
    433472            return cur;
    434473          }
    435474        case OpCodes.Mul: {
    436             var cur = Evaluate(dataset, ref row, state);
     475            var cur = Evaluate(dataset, ref row, state, traceDict);
    437476            for (int i = 1; i < currentInstr.nArguments; i++) {
    438               var op = Evaluate(dataset, ref row, state);
     477              var op = Evaluate(dataset, ref row, state, traceDict);
    439478              cur = ArithmeticApply(cur, op,
    440479                (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 1.0),
     
    444483                (v1, v2) => v1.PointwiseMultiply(v2));
    445484            }
     485            TraceEvaluation(currentInstr, cur);
    446486            return cur;
    447487          }
    448488        case OpCodes.Div: {
    449             var cur = Evaluate(dataset, ref row, state);
     489            var cur = Evaluate(dataset, ref row, state, traceDict);
    450490            for (int i = 1; i < currentInstr.nArguments; i++) {
    451               var op = Evaluate(dataset, ref row, state);
     491              var op = Evaluate(dataset, ref row, state, traceDict);
    452492              cur = ArithmeticApply(cur, op,
    453493                (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 1.0),
     
    457497                (v1, v2) => v1 / v2);
    458498            }
     499            TraceEvaluation(currentInstr, cur);
    459500            return cur;
    460501          }
    461502        case OpCodes.Absolute: {
    462             var cur = Evaluate(dataset, ref row, state);
    463             return FunctionApply(cur, Math.Abs, DoubleVector.Abs);
     503            var cur = Evaluate(dataset, ref row, state, traceDict);
     504            cur = FunctionApply(cur, Math.Abs, DoubleVector.Abs);
     505            TraceEvaluation(currentInstr, cur);
     506            return cur;
    464507          }
    465508        case OpCodes.Tanh: {
    466             var cur = Evaluate(dataset, ref row, state);
    467             return FunctionApply(cur, Math.Tanh, DoubleVector.Tanh);
     509            var cur = Evaluate(dataset, ref row, state, traceDict);
     510            cur = FunctionApply(cur, Math.Tanh, DoubleVector.Tanh);
     511            TraceEvaluation(currentInstr, cur);
     512            return cur;
    468513          }
    469514        case OpCodes.Cos: {
    470             var cur = Evaluate(dataset, ref row, state);
    471             return FunctionApply(cur, Math.Cos, DoubleVector.Cos);
     515            var cur = Evaluate(dataset, ref row, state, traceDict);
     516            cur = FunctionApply(cur, Math.Cos, DoubleVector.Cos);
     517            TraceEvaluation(currentInstr, cur);
     518            return cur;
    472519          }
    473520        case OpCodes.Sin: {
    474             var cur = Evaluate(dataset, ref row, state);
    475             return FunctionApply(cur, Math.Sin, DoubleVector.Sin);
     521            var cur = Evaluate(dataset, ref row, state, traceDict);
     522            cur = FunctionApply(cur, Math.Sin, DoubleVector.Sin);
     523            TraceEvaluation(currentInstr, cur);
     524            return cur;
    476525          }
    477526        case OpCodes.Tan: {
    478             var cur = Evaluate(dataset, ref row, state);
    479             return FunctionApply(cur, Math.Tan, DoubleVector.Tan);
     527            var cur = Evaluate(dataset, ref row, state, traceDict);
     528            cur = FunctionApply(cur, Math.Tan, DoubleVector.Tan);
     529            TraceEvaluation(currentInstr, cur);
     530            return cur;
    480531          }
    481532        case OpCodes.Square: {
    482             var cur = Evaluate(dataset, ref row, state);
    483             return FunctionApply(cur,
     533            var cur = Evaluate(dataset, ref row, state, traceDict);
     534            cur = FunctionApply(cur,
    484535              s => Math.Pow(s, 2),
    485536              v => v.PointwisePower(2));
     537            TraceEvaluation(currentInstr, cur);
     538            return cur;
    486539          }
    487540        case OpCodes.Cube: {
    488             var cur = Evaluate(dataset, ref row, state);
    489             return FunctionApply(cur,
     541            var cur = Evaluate(dataset, ref row, state, traceDict);
     542            cur = FunctionApply(cur,
    490543              s => Math.Pow(s, 3),
    491544              v => v.PointwisePower(3));
     545            TraceEvaluation(currentInstr, cur);
     546            return cur;
    492547          }
    493548        case OpCodes.Power: {
    494             var x = Evaluate(dataset, ref row, state);
    495             var y = Evaluate(dataset, ref row, state);
    496             return ArithmeticApply(x, y,
     549            var x = Evaluate(dataset, ref row, state, traceDict);
     550            var y = Evaluate(dataset, ref row, state, traceDict);
     551            var cur = ArithmeticApply(x, y,
    497552              (lhs, rhs) => lhs.Count < rhs.Count
    498553                ? CutLonger(lhs, rhs)
     
    502557              (v1, s2) => v1.PointwisePower(Math.Round(s2)),
    503558              (v1, v2) => v1.PointwisePower(DoubleVector.Round(v2)));
     559            TraceEvaluation(currentInstr, cur);
     560            return cur;
    504561          }
    505562        case OpCodes.SquareRoot: {
    506             var cur = Evaluate(dataset, ref row, state);
    507             return FunctionApply(cur,
     563            var cur = Evaluate(dataset, ref row, state, traceDict);
     564            cur = FunctionApply(cur,
    508565              s => Math.Sqrt(s),
    509566              v => DoubleVector.Sqrt(v));
     567            TraceEvaluation(currentInstr, cur);
     568            return cur;
    510569          }
    511570        case OpCodes.CubeRoot: {
    512             var cur = Evaluate(dataset, ref row, state);
    513             return FunctionApply(cur,
     571            var cur = Evaluate(dataset, ref row, state, traceDict);
     572            cur = FunctionApply(cur,
    514573              s => s < 0 ? -Math.Pow(-s, 1.0 / 3.0) : Math.Pow(s, 1.0 / 3.0),
    515574              v => v.Map(s => s < 0 ? -Math.Pow(-s, 1.0 / 3.0) : Math.Pow(s, 1.0 / 3.0)));
     575            TraceEvaluation(currentInstr, cur);
     576            return cur;
    516577          }
    517578        case OpCodes.Root: {
    518             var x = Evaluate(dataset, ref row, state);
    519             var y = Evaluate(dataset, ref row, state);
    520             return ArithmeticApply(x, y,
     579            var x = Evaluate(dataset, ref row, state, traceDict);
     580            var y = Evaluate(dataset, ref row, state, traceDict);
     581            var cur = ArithmeticApply(x, y,
    521582              (lhs, rhs) => lhs.Count < rhs.Count
    522583                ? CutLonger(lhs, rhs)
     
    526587              (v1, s2) => v1.PointwisePower(1.0 / Math.Round(s2)),
    527588              (v1, v2) => v1.PointwisePower(1.0 / DoubleVector.Round(v2)));
     589            TraceEvaluation(currentInstr, cur);
     590            return cur;
    528591          }
    529592        case OpCodes.Exp: {
    530             var cur = Evaluate(dataset, ref row, state);
    531             return FunctionApply(cur,
     593            var cur = Evaluate(dataset, ref row, state, traceDict);
     594            cur = FunctionApply(cur,
    532595              s => Math.Exp(s),
    533596              v => DoubleVector.Exp(v));
     597            TraceEvaluation(currentInstr, cur);
     598            return cur;
    534599          }
    535600        case OpCodes.Log: {
    536             var cur = Evaluate(dataset, ref row, state);
    537             return FunctionApply(cur,
     601            var cur = Evaluate(dataset, ref row, state, traceDict);
     602            cur = FunctionApply(cur,
    538603              s => Math.Log(s),
    539604              v => DoubleVector.Log(v));
     605            TraceEvaluation(currentInstr, cur);
     606            return cur;
    540607          }
    541608        case OpCodes.Sum: {
    542             var cur = Evaluate(dataset, ref row, state);
    543             return AggregateApply(cur, (WindowedSymbolTreeNode)currentInstr.dynamicNode,
     609            var cur = Evaluate(dataset, ref row, state, traceDict);
     610            cur = AggregateApply(cur,
    544611              s => s,
    545612              v => v.Sum());
     613            TraceEvaluation(currentInstr, cur);
     614            return cur;
    546615          }
    547616        case OpCodes.Mean: {
    548             var cur = Evaluate(dataset, ref row, state);
    549             return AggregateApply(cur,
     617            var cur = Evaluate(dataset, ref row, state, traceDict);
     618            cur = AggregateApply(cur,
    550619              s => s,
    551620              v => Statistics.Mean(v));
     621            TraceEvaluation(currentInstr, cur);
     622            return cur;
    552623          }
    553624        case OpCodes.StandardDeviation: {
    554             var cur = Evaluate(dataset, ref row, state);
    555             return AggregateApply(cur,
     625            var cur = Evaluate(dataset, ref row, state, traceDict);
     626            cur = AggregateApply(cur,
    556627              s => 0,
    557628              v => Statistics.PopulationStandardDeviation(v));
     629            TraceEvaluation(currentInstr, cur);
     630            return cur;
    558631          }
    559632        case OpCodes.Length: {
    560             var cur = Evaluate(dataset, ref row, state);
    561             return AggregateApply(cur,
     633            var cur = Evaluate(dataset, ref row, state, traceDict);
     634            cur = AggregateApply(cur,
    562635              s => 1,
    563636              v => v.Count);
     637            TraceEvaluation(currentInstr, cur);
     638            return cur;
    564639          }
    565640        case OpCodes.Min: {
    566             var cur = Evaluate(dataset, ref row, state);
    567             return AggregateApply(cur,
     641            var cur = Evaluate(dataset, ref row, state, traceDict);
     642            cur = AggregateApply(cur,
    568643              s => s,
    569644              v => Statistics.Minimum(v));
     645            TraceEvaluation(currentInstr, cur);
     646            return cur;
    570647          }
    571648        case OpCodes.Max: {
    572             var cur = Evaluate(dataset, ref row, state);
    573             return AggregateApply(cur,
     649            var cur = Evaluate(dataset, ref row, state, traceDict);
     650            cur = AggregateApply(cur,
    574651              s => s,
    575652              v => Statistics.Maximum(v));
     653            TraceEvaluation(currentInstr, cur);
     654            return cur;
    576655          }
    577656        case OpCodes.Variance: {
    578             var cur = Evaluate(dataset, ref row, state);
    579             return AggregateApply(cur,
     657            var cur = Evaluate(dataset, ref row, state, traceDict);
     658            cur = AggregateApply(cur,
    580659              s => 0,
    581660              v => Statistics.PopulationVariance(v));
     661            TraceEvaluation(currentInstr, cur);
     662            return cur;
    582663          }
    583664        case OpCodes.Skewness: {
    584             var cur = Evaluate(dataset, ref row, state);
    585             return AggregateApply(cur,
     665            var cur = Evaluate(dataset, ref row, state, traceDict);
     666            cur = AggregateApply(cur,
    586667              s => double.NaN,
    587668              v => Statistics.PopulationSkewness(v));
     669            TraceEvaluation(currentInstr, cur);
     670            return cur;
    588671          }
    589672        case OpCodes.Kurtosis: {
    590             var cur = Evaluate(dataset, ref row, state);
    591             return AggregateApply(cur,
     673            var cur = Evaluate(dataset, ref row, state, traceDict);
     674            cur = AggregateApply(cur,
    592675              s => double.NaN,
    593676              v => Statistics.PopulationKurtosis(v));
     677            TraceEvaluation(currentInstr, cur);
     678            return cur;
    594679          }
    595680        case OpCodes.EuclideanDistance: {
    596             var x1 = Evaluate(dataset, ref row, state);
    597             var x2 = Evaluate(dataset, ref row, state);
    598             return AggregateMultipleApply(x1, x2,
     681            var x1 = Evaluate(dataset, ref row, state, traceDict);
     682            var x2 = Evaluate(dataset, ref row, state, traceDict);
     683            var cur = AggregateMultipleApply(x1, x2,
    599684              (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 0.0),
    600685              (s1, s2) => s1 - s2,
     
    602687              (v1, s2) => Math.Sqrt((v1 - s2).PointwisePower(2).Sum()),
    603688              (v1, v2) => Math.Sqrt((v1 - v2).PointwisePower(2).Sum()));
     689            TraceEvaluation(currentInstr, cur);
     690            return cur;
    604691          }
    605692        case OpCodes.Covariance: {
    606             var x1 = Evaluate(dataset, ref row, state);
    607             var x2 = Evaluate(dataset, ref row, state);
    608             return AggregateMultipleApply(x1, x2,
     693            var x1 = Evaluate(dataset, ref row, state, traceDict);
     694            var x2 = Evaluate(dataset, ref row, state, traceDict);
     695            var cur = AggregateMultipleApply(x1, x2,
    609696              (lhs, rhs) => ApplyVectorLengthStrategy(DifferentVectorLengthStrategy, lhs, rhs, 0.0),
    610697              (s1, s2) => 0,
     
    612699              (v1, s2) => 0,
    613700              (v1, v2) => Statistics.PopulationCovariance(v1, v2));
     701            TraceEvaluation(currentInstr, cur);
     702            return cur;
     703          }
     704        case OpCodes.SubVector: {
     705            var cur = Evaluate(dataset, ref row, state, traceDict);
     706            return WindowedFunctionApply(cur, (WindowedSymbolTreeNode)currentInstr.dynamicNode,
     707              s => s,
     708              v => v);
    614709          }
    615710        case OpCodes.Variable: {
    616711            if (row < 0 || row >= dataset.Rows) return EvaluationResult.NaN;
    617712            var variableTreeNode = (VariableTreeNode)currentInstr.dynamicNode;
    618             if (currentInstr.data is IList<double> doubleList)
    619               return new EvaluationResult(doubleList[row] * variableTreeNode.Weight);
    620             if (currentInstr.data is IList<DoubleVector> doubleVectorList)
    621               return new EvaluationResult(doubleVectorList[row] * variableTreeNode.Weight);
     713            if (currentInstr.data is IList<double> doubleList) {
     714              var cur = new EvaluationResult(doubleList[row] * variableTreeNode.Weight);
     715              TraceEvaluation(currentInstr, cur);
     716              return cur;
     717            }
     718            if (currentInstr.data is IList<DoubleVector> doubleVectorList) {
     719              var cur = new EvaluationResult(doubleVectorList[row] * variableTreeNode.Weight);
     720              TraceEvaluation(currentInstr, cur);
     721              return cur;
     722            }
    622723            throw new NotSupportedException($"Unsupported type of variable: {currentInstr.data.GetType().GetPrettyName()}");
    623724          }
     
    625726            if (row < 0 || row >= dataset.Rows) return EvaluationResult.NaN;
    626727            var factorVarTreeNode = currentInstr.dynamicNode as BinaryFactorVariableTreeNode;
    627             return new EvaluationResult(((IList<string>)currentInstr.data)[row] == factorVarTreeNode.VariableValue ? factorVarTreeNode.Weight : 0);
     728            var cur = new EvaluationResult(((IList<string>)currentInstr.data)[row] == factorVarTreeNode.VariableValue ? factorVarTreeNode.Weight : 0);
     729            TraceEvaluation(currentInstr, cur);
     730            return cur;
    628731          }
    629732        case OpCodes.FactorVariable: {
    630733            if (row < 0 || row >= dataset.Rows) return EvaluationResult.NaN;
    631734            var factorVarTreeNode = currentInstr.dynamicNode as FactorVariableTreeNode;
    632             return new EvaluationResult(factorVarTreeNode.GetValue(((IList<string>)currentInstr.data)[row]));
     735            var cur = new EvaluationResult(factorVarTreeNode.GetValue(((IList<string>)currentInstr.data)[row]));
     736            TraceEvaluation(currentInstr, cur);
     737            return cur;
    633738          }
    634739        case OpCodes.Constant: {
    635740            var constTreeNode = (ConstantTreeNode)currentInstr.dynamicNode;
    636             return new EvaluationResult(constTreeNode.Value);
     741            var cur = new EvaluationResult(constTreeNode.Value);
     742            TraceEvaluation(currentInstr, cur);
     743            return cur;
    637744          }
    638745
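
One detail of the interpreter change worth spelling out: WindowedFunctionApply turns the node's relative offset and length into absolute indices before calling MathNet's SubVector. A small worked example with illustrative values:

    // Worked example of the window arithmetic in WindowedFunctionApply (values are illustrative).
    // offset = 0.2, length = 0.5, vector Count = 10:
    //   index = (int)(0.2 * 10)       = 2   start of the window
    //   count = (int)(0.5 * (10 - 2)) = 4   window spans elements 2..5
    var v = MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(10, i => i);
    var window = v.SubVector(2, 4); // [2, 3, 4, 5]

The new GetIntermediateNodeValues overload reuses the same Evaluate pass but records each node's EvaluationResult in a trace dictionary; that trace is what the unrolling converter consumes to learn the vector length at every node.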
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/SubVector.cs

    r17725 r17726  
    2222using HeuristicLab.Common;
    2323using HeuristicLab.Core;
    24 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2524using HEAL.Attic;
    2625
    2726namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
    28   [StorableType("C6C245BF-C44A-4207-A268-55641483F27F")]
    29   [Item("Sum", "Symbol that represents the sum function.")]
    30   public sealed class Sum : WindowedSymbol {
     27  [StorableType("4E9511C6-0FA4-496D-9610-35D9F779F899")]
     28  [Item("SubVector", "Symbol that represents SubVector sum function.")]
     29  public sealed class SubVector : WindowedSymbol {
    3130    private const int minimumArity = 1;
    3231    private const int maximumArity = 1;
     
    4039
    4140    [StorableConstructor]
    42     private Sum(StorableConstructorFlag _) : base(_) { }
    43     private Sum(Sum original, Cloner cloner) : base(original, cloner) { }
     41    private SubVector(StorableConstructorFlag _) : base(_) { }
     42    private SubVector(SubVector original, Cloner cloner) : base(original, cloner) { }
    4443    public override IDeepCloneable Clone(Cloner cloner) {
    45       return new Sum(this, cloner);
     44      return new SubVector(this, cloner);
    4645    }
    47     public Sum() : base("Sum", "Symbol that represents the sum function.") { }
     46    public SubVector() : base("SubVector", "Symbol that represents the SubVector function.") { }
    4847  }
    4948}
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/Sum.cs

    r17573 r17726  
    2828  [StorableType("C6C245BF-C44A-4207-A268-55641483F27F")]
    2929  [Item("Sum", "Symbol that represents the sum function.")]
    30   public sealed class Sum : WindowedSymbol {
     30  public sealed class Sum : Symbol {
    3131    private const int minimumArity = 1;
    3232    private const int maximumArity = 1;
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/WindowedSymbol.cs

    r17573 r17726  
    130130        if (value != manipulatorLengthSigma) {
    131131          manipulatorLengthSigma = value;
    132           OnChanged(EventArgs.Empty);;
     132          OnChanged(EventArgs.Empty);
    133133        }
    134134      }