Changeset 13948

Timestamp:
06/29/16 10:36:52
Author:
pfleck
Message:

#2597

  • Merged recent trunk changes.
  • Adapted VariablesUsedForPrediction property for RegressionSolutionTargetResponseGradientView.
  • Fixed a reference (.dll to project ref).
Location:
branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis
Files:
43 edited
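
The "Adapted VariablesUsedForPrediction property" item of the message is the recurring pattern in most of the edited files below: concrete models no longer derive from NamedItem and carry their own targetVariable field, but derive from the abstract ClassificationModel/RegressionModel base classes, pass the target variable to the base constructor, and override VariablesUsedForPrediction. A minimal sketch of the resulting shape of a model class, assuming the base classes live alongside the IRegressionModel interfaces in HeuristicLab.Problems.DataAnalysis (ExampleRegressionModel is illustrative only; the storable/cloning plumbing every HeuristicLab item needs is omitted):

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using HeuristicLab.Problems.DataAnalysis;

    public class ExampleRegressionModel : RegressionModel {
      private readonly string[] allowedInputVariables;

      // the target variable is now stored by the base class instead of a private
      // targetVariable field in every concrete model
      public ExampleRegressionModel(string targetVariable, IEnumerable<string> allowedInputVariables)
        : base(targetVariable) {
        this.allowedInputVariables = allowedInputVariables.ToArray();
      }

      // each model reports the variables it actually uses for prediction,
      // which is what the RegressionSolutionTargetResponseGradientView consumes
      public override IEnumerable<string> VariablesUsedForPrediction {
        get { return allowedInputVariables; }
      }

      public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
        throw new NotImplementedException(); // model-specific estimation logic
      }

      public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
        return new RegressionSolution(this, new RegressionProblemData(problemData));
      }
    }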

  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis

  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/BaselineClassifiers/OneR.cs

    r13092 r13948  
    139139      }
    140140
    141       var model = new OneRClassificationModel(bestVariable, bestSplits.Select(s => s.thresholdValue).ToArray(), bestSplits.Select(s => s.classValue).ToArray(), bestMissingValuesClass);
     141      var model = new OneRClassificationModel(problemData.TargetVariable, bestVariable, bestSplits.Select(s => s.thresholdValue).ToArray(), bestSplits.Select(s => s.classValue).ToArray(), bestMissingValuesClass);
    142142      var solution = new OneRClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
    143143
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/BaselineClassifiers/OneRClassificationModel.cs

    r13098 r13948  
    3131  [StorableClass]
    3232  [Item("OneR Classification Model", "A model that uses intervals for one variable to determine the class.")]
    33   public class OneRClassificationModel : NamedItem, IClassificationModel {
     33  public class OneRClassificationModel : ClassificationModel {
     34    public override IEnumerable<string> VariablesUsedForPrediction {
     35      get { return new[] { Variable }; }
     36    }
     37
    3438    [Storable]
    3539    protected string variable;
     
    6670    public override IDeepCloneable Clone(Cloner cloner) { return new OneRClassificationModel(this, cloner); }
    6771
    68     public OneRClassificationModel(string variable, double[] splits, double[] classes, double missingValuesClass = double.NaN)
    69       : base() {
     72    public OneRClassificationModel(string targetVariable, string variable, double[] splits, double[] classes, double missingValuesClass = double.NaN)
     73      : base(targetVariable) {
    7074      if (splits.Length != classes.Length) {
    7175        throw new ArgumentException("Number of splits and classes has to be equal.");
     
    8488    // uses sorting to return the values in the order of rows, instead of using nested for loops
    8589    // to avoid O(n²) runtime
    86     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     90    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    8791      var values = dataset.GetDoubleValues(Variable, rows).ToArray();
    8892      var rowsArray = rows.ToArray();
     
    108112    }
    109113
    110     public IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     114    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    111115      return new OneRClassificationSolution(this, new ClassificationProblemData(problemData));
    112116    }
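
The comment preserved in GetEstimatedClassValues above ("uses sorting to return the values in the order of rows ... to avoid O(n²) runtime") describes a general trick: sort the values once together with their row indices, advance a single threshold pointer over the sorted values, and write the estimates back in the original row order. A self-contained sketch of that idea, with illustrative names that are not taken from OneRClassificationModel:

    using System;
    using System.Linq;

    static class OneRSketch {
      // assumes thresholds are sorted ascending and classes[t] is the class value of the
      // interval that ends at thresholds[t]; the last interval is open-ended
      public static double[] EstimateByThresholds(double[] values, double[] thresholds, double[] classes) {
        int n = values.Length;
        var order = Enumerable.Range(0, n).ToArray();
        var keys = (double[])values.Clone();
        Array.Sort(keys, order);                   // one O(n log n) sort, carrying the original row indices along

        var estimated = new double[n];
        int t = 0;
        for (int i = 0; i < n; i++) {
          // the threshold pointer only moves forward, so the loop is O(n + thresholds.Length) overall
          while (t < thresholds.Length - 1 && keys[i] > thresholds[t]) t++;
          estimated[order[i]] = classes[t];        // write the estimate back at the original row position
        }
        return estimated;
      }
    }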
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GBM/GradientBoostingRegressionAlgorithm.cs

    r13724 r13948  
    6464    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    6565    private const string CreateSolutionParameterName = "CreateSolution";
     66    private const string StoreRunsParameterName = "StoreRuns";
    6667    private const string RegressionAlgorithmSolutionResultParameterName = "RegressionAlgorithmResult";
    6768
     
    106107      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    107108    }
     109    public IFixedValueParameter<BoolValue> StoreRunsParameter {
     110      get { return (IFixedValueParameter<BoolValue>)Parameters[StoreRunsParameterName]; }
     111    }
    108112
    109113    #endregion
     
    144148      get { return CreateSolutionParameter.Value.Value; }
    145149      set { CreateSolutionParameter.Value.Value = value; }
     150    }
     151
     152    public bool StoreRuns {
     153      get { return StoreRunsParameter.Value.Value; }
     154      set { StoreRunsParameter.Value.Value = value; }
    146155    }
    147156
     
    178187      var regressionAlgs = new ItemSet<IAlgorithm>(new IAlgorithm[] {
    179188        new RandomForestRegression(),
    180         sgp, 
     189        sgp,
    181190        mctsSymbReg
    182191      });
     
    206215        "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
    207216      Parameters[CreateSolutionParameterName].Hidden = true;
     217      Parameters.Add(new FixedValueParameter<BoolValue>(StoreRunsParameterName,
     218        "Flag that indicates if the results of the individual runs should be stored for detailed analysis", new BoolValue(false)));
     219      Parameters[StoreRunsParameterName].Hidden = true;
    208220    }
    209221
     
    218230
    219231      var table = new DataTable("Qualities");
    220       table.Rows.Add(new DataRow("Loss (train)"));
    221       table.Rows.Add(new DataRow("Loss (test)"));
     232      table.Rows.Add(new DataRow("R² (train)"));
     233      table.Rows.Add(new DataRow("R² (test)"));
    222234      Results.Add(new Result("Qualities", table));
    223235      var curLoss = new DoubleValue();
    224236      var curTestLoss = new DoubleValue();
    225       Results.Add(new Result("Loss (train)", curLoss));
    226       Results.Add(new Result("Loss (test)", curTestLoss));
     237      Results.Add(new Result("R² (train)", curLoss));
     238      Results.Add(new Result("R² (test)", curTestLoss));
    227239      var runCollection = new RunCollection();
    228       Results.Add(new Result("Runs", runCollection));
     240      if (StoreRuns)
     241        Results.Add(new Result("Runs", runCollection));
    229242
    230243      // init
    231244      var problemData = Problem.ProblemData;
    232       var targetVarName = Problem.ProblemData.TargetVariable;
     245      var targetVarName = problemData.TargetVariable;
    233246      var activeVariables = problemData.AllowedInputVariables.Concat(new string[] { problemData.TargetVariable });
    234247      var modifiableDataset = new ModifiableDataset(
     
    273286          IRegressionModel model;
    274287          IRun run;
     288
    275289          // try to find a model. The algorithm might fail to produce a model. In this case we just retry until the iterations are exhausted
    276           if (TryExecute(alg, RegressionAlgorithmResult, out model, out run)) {
     290          if (TryExecute(alg, rand.Next(), RegressionAlgorithmResult, out model, out run)) {
    277291            int row = 0;
    278292            // update predictions for training and test
     
    303317          }
    304318
    305           runCollection.Add(run);
    306           table.Rows["Loss (train)"].Values.Add(curLoss.Value);
    307           table.Rows["Loss (test)"].Values.Add(curTestLoss.Value);
     319          if (StoreRuns)
     320            runCollection.Add(run);
     321          table.Rows["R² (train)"].Values.Add(curLoss.Value);
     322          table.Rows["R² (test)"].Values.Add(curTestLoss.Value);
    308323          iterations.Value = i + 1;
    309324        }
     
    317332          // just produce an ensemble solution for now (TODO: correct scaling or linear regression for ensemble model weights)
    318333
    319           var ensembleModel = new RegressionEnsembleModel(models) { AverageModelEstimates = false };
    320           var ensembleSolution = ensembleModel.CreateRegressionSolution((IRegressionProblemData)problemData.Clone());
     334          var ensembleSolution = CreateEnsembleSolution(models, (IRegressionProblemData)problemData.Clone());
    321335          Results.Add(new Result("EnsembleSolution", ensembleSolution));
    322336        }
     
    326340        alg.Prepare(true);
    327341      }
     342    }
     343
     344    private static IRegressionEnsembleSolution CreateEnsembleSolution(List<IRegressionModel> models,
     345      IRegressionProblemData problemData) {
     346      var rows = problemData.TrainingPartition.Size;
     347      var features = models.Count;
     348      double[,] inputMatrix = new double[rows, features + 1];
     349      //add model estimates
     350      for (int m = 0; m < models.Count; m++) {
     351        var model = models[m];
     352        var estimates = model.GetEstimatedValues(problemData.Dataset, problemData.TrainingIndices);
     353        int estimatesCounter = 0;
     354        foreach (var estimate in estimates) {
     355          inputMatrix[estimatesCounter, m] = estimate;
     356          estimatesCounter++;
     357        }
     358      }
     359
     360      //add target
     361      var targets = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices);
     362      int targetCounter = 0;
     363      foreach (var target in targets) {
     364        inputMatrix[targetCounter, models.Count] = target;
     365        targetCounter++;
     366      }
     367
     368      alglib.linearmodel lm = new alglib.linearmodel();
     369      alglib.lrreport ar = new alglib.lrreport();
     370      double[] coefficients;
     371      int retVal = 1;
     372      alglib.lrbuildz(inputMatrix, rows, features, out retVal, out lm, out ar);
     373      if (retVal != 1) throw new ArgumentException("Error in calculation of linear regression solution");
     374
     375      alglib.lrunpack(lm, out coefficients, out features);
     376
     377      var ensembleModel = new RegressionEnsembleModel(models, coefficients.Take(models.Count)) { AverageModelEstimates = false };
     378      var ensembleSolution = (IRegressionEnsembleSolution)ensembleModel.CreateRegressionSolution(problemData);
     379      return ensembleSolution;
    328380    }
    329381
     
    391443      progRootNode.AddSubtree(startNode);
    392444      var t = new SymbolicExpressionTree(progRootNode);
    393       var combinedModel = new SymbolicRegressionModel(t, interpreter, lowerLimit, upperLimit);
     445      var combinedModel = new SymbolicRegressionModel(problemData.TargetVariable, t, interpreter, lowerLimit, upperLimit);
    394446      var sol = new SymbolicRegressionSolution(combinedModel, problemData);
    395447      return sol;
     
    405457    }
    406458
    407     private static bool TryExecute(IAlgorithm alg, string regressionAlgorithmResultName, out IRegressionModel model, out IRun run) {
     459    private static bool TryExecute(IAlgorithm alg, int seed, string regressionAlgorithmResultName, out IRegressionModel model, out IRun run) {
    408460      model = null;
     461      SetSeed(alg, seed);
    409462      using (var wh = new AutoResetEvent(false)) {
    410         EventHandler<EventArgs<Exception>> handler = (sender, args) => wh.Set();
     463        Exception ex = null;
     464        EventHandler<EventArgs<Exception>> handler = (sender, args) => {
     465          ex = args.Value;
     466          wh.Set();
     467        };
    411468        EventHandler handler2 = (sender, args) => wh.Set();
    412469        alg.ExceptionOccurred += handler;
     
    417474          wh.WaitOne();
    418475
     476          if (ex != null) throw new AggregateException(ex);
    419477          run = alg.Runs.Last();
     478          alg.Runs.Clear();
    420479          var sols = alg.Results.Select(r => r.Value).OfType<IRegressionSolution>();
    421480          if (!sols.Any()) return false;
     
    444503      return model != null;
    445504    }
     505
     506    private static void SetSeed(IAlgorithm alg, int seed) {
     507      // no common interface for algs that use a PRNG -> use naming convention to set seed
     508      var paramItem = alg as IParameterizedItem;
     509
     510      if (paramItem.Parameters.ContainsKey("SetSeedRandomly")) {
     511        ((BoolValue)paramItem.Parameters["SetSeedRandomly"].ActualValue).Value = false;
     512        ((IntValue)paramItem.Parameters["Seed"].ActualValue).Value = seed;
     513      } else {
     514        throw new ArgumentException("Base learner does not have a seed parameter (algorithm {0})", alg.Name);
     515      }
     516
     517    }
    446518  }
    447519}
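
The new CreateEnsembleSolution above fits one coefficient per stored boosting-stage model by linear regression on the training estimates (alglib.lrbuildz builds the model without a constant term) and passes the coefficients to RegressionEnsembleModel with AverageModelEstimates = false. Assuming that setting makes the ensemble return the coefficient-weighted sum of the member estimates rather than their mean, the resulting prediction is equivalent to this sketch (helper name and signature are illustrative):

    using System.Collections.Generic;
    using System.Linq;
    using HeuristicLab.Problems.DataAnalysis;

    static class EnsembleSketch {
      // y(row) ≈ Σ_m coefficients[m] * estimate_m(row); no intercept because lrbuildz was used
      public static double[] WeightedEnsembleEstimate(IList<IRegressionModel> models, double[] coefficients,
                                                      IDataset dataset, IEnumerable<int> rows) {
        var rowList = rows.ToList();
        var sum = new double[rowList.Count];
        for (int m = 0; m < models.Count; m++) {
          int i = 0;
          foreach (var estimate in models[m].GetEstimatedValues(dataset, rowList))
            sum[i++] += coefficients[m] * estimate;   // accumulate the weighted member estimate per row
        }
        return sum;
      }
    }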
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceProduct.cs

    r13784 r13948  
    9494    public static IList<double> GetGradient(double[,] x, int i, int j, List<ParameterizedCovarianceFunction> factorFunctions) {
    9595      var covariances = factorFunctions.Select(f => f.Covariance(x, i, j)).ToArray();
    96       var gr = new List<double>(factorFunctions.Sum(f => f.CovarianceGradient(x, i, j).Count));
     96      var gr = new List<double>();
    9797      for (int ii = 0; ii < factorFunctions.Count; ii++) {
    9898        foreach (var g in factorFunctions[ii].CovarianceGradient(x, i, j)) {
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSum.cs

    r13784 r13948  
    8888      sum.CrossCovariance = (x, xt, i, j) => functions.Select(e => e.CrossCovariance(x, xt, i, j)).Sum();
    8989      sum.CovarianceGradient = (x, i, j) => {
    90         var g = new List<double>(functions.Sum(e => e.CovarianceGradient(x, i, j).Count));
     90        var g = new List<double>();
    9191        foreach (var e in functions)
    9292          g.AddRange(e.CovarianceGradient(x, i, j));
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r13823 r13948  
    3434  [StorableClass]
    3535  [Item("GaussianProcessModel", "Represents a Gaussian process posterior.")]
    36   public sealed class GaussianProcessModel : NamedItem, IGaussianProcessModel {
     36  public sealed class GaussianProcessModel : RegressionModel, IGaussianProcessModel {
     37    public override IEnumerable<string> VariablesUsedForPrediction {
     38      get { return allowedInputVariables; }
     39    }
     40
    3741    [Storable]
    3842    private double negativeLogLikelihood;
     
    6165      get { return meanFunction; }
    6266    }
    63     [Storable]
    64     private string targetVariable;
    65     public string TargetVariable {
    66       get { return targetVariable; }
    67     }
     67
    6868    [Storable]
    6969    private string[] allowedInputVariables;
     
    128128      this.trainingDataset = cloner.Clone(original.trainingDataset);
    129129      this.negativeLogLikelihood = original.negativeLogLikelihood;
    130       this.targetVariable = original.targetVariable;
    131130      this.sqrSigmaNoise = original.sqrSigmaNoise;
    132131      if (original.meanParameter != null) {
     
    147146      IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction,
    148147      bool scaleInputs = true)
    149       : base() {
     148      : base(targetVariable) {
    150149      this.name = ItemName;
    151150      this.description = ItemDescription;
    152151      this.meanFunction = (IMeanFunction)meanFunction.Clone();
    153152      this.covarianceFunction = (ICovarianceFunction)covarianceFunction.Clone();
    154       this.targetVariable = targetVariable;
    155153      this.allowedInputVariables = allowedInputVariables.ToArray();
    156154
     
    182180
    183181      IEnumerable<double> y;
    184       y = ds.GetDoubleValues(targetVariable, rows);
     182      y = ds.GetDoubleValues(TargetVariable, rows);
    185183
    186184      int n = x.GetLength(0);
     
    301299
    302300    #region IRegressionModel Members
    303     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     301    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    304302      return GetEstimatedValuesHelper(dataset, rows);
    305303    }
    306     public GaussianProcessRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     304    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    307305      return new GaussianProcessRegressionSolution(this, new RegressionProblemData(problemData));
    308     }
    309     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    310       return CreateRegressionSolution(problemData);
    311306    }
    312307    #endregion
     
    392387      }
    393388    }
     389
    394390  }
    395391}
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/StudentTProcessModel.cs

    r13823 r13948  
    3434  [StorableClass]
    3535  [Item("StudentTProcessModel", "Represents a Student-t process posterior.")]
    36   public sealed class StudentTProcessModel : NamedItem, IGaussianProcessModel {
     36  public sealed class StudentTProcessModel : RegressionModel, IGaussianProcessModel {
     37    public override IEnumerable<string> VariablesUsedForPrediction {
     38      get { return allowedInputVariables; }
     39    }
     40
    3741    [Storable]
    3842    private double negativeLogLikelihood;
     
    6165      get { return meanFunction; }
    6266    }
    63     [Storable]
    64     private string targetVariable;
    65     public string TargetVariable {
    66       get { return targetVariable; }
    67     }
     67
    6868    [Storable]
    6969    private string[] allowedInputVariables;
     
    131131      this.trainingDataset = cloner.Clone(original.trainingDataset);
    132132      this.negativeLogLikelihood = original.negativeLogLikelihood;
    133       this.targetVariable = original.targetVariable;
    134133      if (original.meanParameter != null) {
    135134        this.meanParameter = (double[])original.meanParameter.Clone();
     
    151150      IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction,
    152151      bool scaleInputs = true)
    153       : base() {
     152      : base(targetVariable) {
    154153      this.name = ItemName;
    155154      this.description = ItemDescription;
    156155      this.meanFunction = (IMeanFunction)meanFunction.Clone();
    157156      this.covarianceFunction = (ICovarianceFunction)covarianceFunction.Clone();
    158       this.targetVariable = targetVariable;
    159157      this.allowedInputVariables = allowedInputVariables.ToArray();
    160158
     
    186184
    187185      IEnumerable<double> y;
    188       y = ds.GetDoubleValues(targetVariable, rows);
     186      y = ds.GetDoubleValues(TargetVariable, rows);
    189187
    190188      int n = x.GetLength(0);
     
    318316
    319317    #region IRegressionModel Members
    320     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     318    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    321319      return GetEstimatedValuesHelper(dataset, rows);
    322320    }
    323     public GaussianProcessRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     321    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    324322      return new GaussianProcessRegressionSolution(this, new RegressionProblemData(problemData));
    325     }
    326     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    327       return CreateRegressionSolution(problemData);
    328323    }
    329324    #endregion
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModel.cs

    r13157 r13948  
    3333  [Item("Gradient boosted tree model", "")]
    3434  // this is essentially a collection of weighted regression models
    35   public sealed class GradientBoostedTreesModel : NamedItem, IGradientBoostedTreesModel {
     35  public sealed class GradientBoostedTreesModel : RegressionModel, IGradientBoostedTreesModel {
    3636    // BackwardsCompatibility3.4 for allowing deserialization & serialization of old models
    3737    #region Backwards compatible code, remove with 3.5
     
    5858    #endregion
    5959
     60    public override IEnumerable<string> VariablesUsedForPrediction {
     61      get { return models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x); }
     62    }
     63
    6064    private readonly IList<IRegressionModel> models;
    6165    public IEnumerable<IRegressionModel> Models { get { return models; } }
     
    7781    }
    7882    [Obsolete("The constructor of GBTModel should not be used directly anymore (use GBTModelSurrogate instead)")]
    79     public GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
    80       : base("Gradient boosted tree model", string.Empty) {
     83    internal GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
     84      : base(string.Empty, "Gradient boosted tree model", string.Empty) {
    8185      this.models = new List<IRegressionModel>(models);
    8286      this.weights = new List<double>(weights);
     
    8993    }
    9094
    91     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     95    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    9296      // allocate target array go over all models and add up weighted estimation for each row
    9397      if (!rows.Any()) return Enumerable.Empty<double>(); // return immediately if rows is empty. This prevents multiple iteration over lazy rows enumerable.
     
    105109    }
    106110
    107     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     111    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    108112      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    109113    }
     114
    110115  }
    111116}
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs

    r13157 r13948  
    2222
    2323using System.Collections.Generic;
     24using System.Linq;
    2425using HeuristicLab.Common;
    2526using HeuristicLab.Core;
     
    3334  // recalculate the actual GBT model on demand
    3435  [Item("Gradient boosted tree model", "")]
    35   public sealed class GradientBoostedTreesModelSurrogate : NamedItem, IGradientBoostedTreesModel {
     36  public sealed class GradientBoostedTreesModelSurrogate : RegressionModel, IGradientBoostedTreesModel {
    3637    // don't store the actual model!
    3738    private IGradientBoostedTreesModel actualModel; // the actual model is only recalculated when necessary
     
    5556
    5657
     58    public override IEnumerable<string> VariablesUsedForPrediction {
     59      get { return actualModel.Models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x); }
     60    }
     61
    5762    [StorableConstructor]
    5863    private GradientBoostedTreesModelSurrogate(bool deserializing) : base(deserializing) { }
     
    7378
    7479    // create only the surrogate model without an actual model
    75     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
    76       : base("Gradient boosted tree model", string.Empty) {
     80    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
     81      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
     82      : base(trainingProblemData.TargetVariable, "Gradient boosted tree model", string.Empty) {
    7783      this.trainingProblemData = trainingProblemData;
    7884      this.seed = seed;
     
    8692
    8793    // wrap an actual model in a surrograte
    88     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu, IGradientBoostedTreesModel model)
     94    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
     95      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu,
     96      IGradientBoostedTreesModel model)
    8997      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
    9098      this.actualModel = model;
     
    96104
    97105    // forward message to actual model (recalculate model first if necessary)
    98     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     106    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    99107      if (actualModel == null) actualModel = RecalculateModel();
    100108      return actualModel.GetEstimatedValues(dataset, rows);
    101109    }
    102110
    103     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     111    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    104112      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    105113    }
    106 
    107114
    108115    private IGradientBoostedTreesModel RecalculateModel() {
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeBuilder.cs

    r13065 r13948  
    180180
    181181
    182     // processes potential splits from the queue as long as splits are left and the maximum size of the tree is not reached
     182    // processes potential splits from the queue as long as splits are remaining and the maximum size of the tree is not reached
    183183    private void CreateRegressionTreeFromQueue(int maxNodes, ILossFunction lossFunction) {
    184184      while (queue.Any() && curTreeNodeIdx + 1 < maxNodes) { // two nodes are created in each loop
     
    204204
    205205        // overwrite existing leaf node with an internal node
    206         tree[f.ParentNodeIdx] = new RegressionTreeModel.TreeNode(f.SplittingVariable, f.SplittingThreshold, leftTreeIdx, rightTreeIdx);
     206        tree[f.ParentNodeIdx] = new RegressionTreeModel.TreeNode(f.SplittingVariable, f.SplittingThreshold, leftTreeIdx, rightTreeIdx, weightLeft: (splitIdx - startIdx + 1) / (double)(endIdx - startIdx + 1));
    207207      }
    208208    }
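
The weightLeft argument now passed to the TreeNode constructor above is the fraction of the node's (sorted) training rows that the split sends into the left child. A quick worked example of the expression with illustrative indices:

    // the node covers the sorted rows startIdx..endIdx (10 samples in this example);
    // the chosen split puts rows startIdx..splitIdx (5 samples) into the left child
    int startIdx = 10, splitIdx = 14, endIdx = 19;
    double weightLeft = (splitIdx - startIdx + 1) / (double)(endIdx - startIdx + 1);  // = 5 / 10 = 0.5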
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs

    r13030 r13948  
    3434  [StorableClass]
    3535  [Item("RegressionTreeModel", "Represents a decision tree for regression.")]
    36   public sealed class RegressionTreeModel : NamedItem, IRegressionModel {
     36  public sealed class RegressionTreeModel : RegressionModel {
     37    public override IEnumerable<string> VariablesUsedForPrediction {
     38      get { return tree.Select(t => t.VarName).Where(v => v != TreeNode.NO_VARIABLE); }
     39    }
    3740
    3841    // trees are represented as a flat array   
     
    4043      public readonly static string NO_VARIABLE = null;
    4144
    42       public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1)
     45      public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1, double weightLeft = -1.0)
    4346        : this() {
    4447        VarName = varName;
     
    4649        LeftIdx = leftIdx;
    4750        RightIdx = rightIdx;
    48       }
    49 
    50       public string VarName { get; private set; } // name of the variable for splitting or NO_VARIABLE if terminal node
    51       public double Val { get; private set; } // threshold
    52       public int LeftIdx { get; private set; }
    53       public int RightIdx { get; private set; }
     51        WeightLeft = weightLeft;
     52      }
     53
     54      public string VarName { get; internal set; } // name of the variable for splitting or NO_VARIABLE if terminal node
     55      public double Val { get; internal set; } // threshold
     56      public int LeftIdx { get; internal set; }
     57      public int RightIdx { get; internal set; }
      58      public double WeightLeft { get; internal set; } // for partial dependence plots (value in range [0..1] describes the fraction of training samples for the left sub-tree)
     59
    5460
    5561      // necessary because the default implementation of GetHashCode for structs in .NET would only return the hashcode of val here
     
    6470            LeftIdx.Equals(other.LeftIdx) &&
    6571            RightIdx.Equals(other.RightIdx) &&
     72            WeightLeft.Equals(other.WeightLeft) &&
    6673            EqualStrings(VarName, other.VarName);
    6774        } else {
     
    7986    private TreeNode[] tree;
    8087
    81     [Storable]
     88    #region old storable format
     89    // remove with HL 3.4
     90    [Storable(AllowOneWay = true)]
    8291    // to prevent storing the references to data caches in nodes
    83     // TODO seemingly it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
     92    // seemingly, it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
    8493    private Tuple<string, double, int, int>[] SerializedTree {
    85       get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
    86       set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4)).ToArray(); }
    87     }
     94      // get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
     95      set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4, -1.0)).ToArray(); } // use a weight of -1.0 to indicate that partial dependence cannot be calculated for old models
     96    }
     97    #endregion
     98    #region new storable format
     99    [Storable]
     100    private string[] SerializedTreeVarNames {
     101      get { return tree.Select(t => t.VarName).ToArray(); }
     102      set {
     103        if (tree == null) tree = new TreeNode[value.Length];
     104        for (int i = 0; i < value.Length; i++) {
     105          tree[i].VarName = value[i];
     106        }
     107      }
     108    }
     109    [Storable]
     110    private double[] SerializedTreeValues {
     111      get { return tree.Select(t => t.Val).ToArray(); }
     112      set {
     113        if (tree == null) tree = new TreeNode[value.Length];
     114        for (int i = 0; i < value.Length; i++) {
     115          tree[i].Val = value[i];
     116        }
     117      }
     118    }
     119    [Storable]
     120    private int[] SerializedTreeLeftIdx {
     121      get { return tree.Select(t => t.LeftIdx).ToArray(); }
     122      set {
     123        if (tree == null) tree = new TreeNode[value.Length];
     124        for (int i = 0; i < value.Length; i++) {
     125          tree[i].LeftIdx = value[i];
     126        }
     127      }
     128    }
     129    [Storable]
     130    private int[] SerializedTreeRightIdx {
     131      get { return tree.Select(t => t.RightIdx).ToArray(); }
     132      set {
     133        if (tree == null) tree = new TreeNode[value.Length];
     134        for (int i = 0; i < value.Length; i++) {
     135          tree[i].RightIdx = value[i];
     136        }
     137      }
     138    }
     139    [Storable]
     140    private double[] SerializedTreeWeightLeft {
     141      get { return tree.Select(t => t.WeightLeft).ToArray(); }
     142      set {
     143        if (tree == null) tree = new TreeNode[value.Length];
     144        for (int i = 0; i < value.Length; i++) {
     145          tree[i].WeightLeft = value[i];
     146        }
     147      }
     148    }
     149    #endregion
     150
     151
     152
    88153
    89154    [StorableConstructor]
     
    98163    }
    99164
    100     internal RegressionTreeModel(TreeNode[] tree)
    101       : base("RegressionTreeModel", "Represents a decision tree for regression.") {
     165    internal RegressionTreeModel(TreeNode[] tree, string target = "Target")
     166      : base(target, "RegressionTreeModel", "Represents a decision tree for regression.") {
    102167      this.tree = tree;
    103168    }
     
    108173        if (node.VarName == TreeNode.NO_VARIABLE)
    109174          return node.Val;
    110 
    111         if (columnCache[nodeIdx][row] <= node.Val)
     175        if (columnCache[nodeIdx] == null) {
     176          if (node.WeightLeft.IsAlmost(-1.0)) throw new InvalidOperationException("Cannot calculate partial dependence for trees loaded from older versions of HeuristicLab.");
     177          // weighted average for partial dependence plot (recursive here because we need to calculate both sub-trees)
     178          return node.WeightLeft * GetPredictionForRow(t, columnCache, node.LeftIdx, row) +
     179                 (1.0 - node.WeightLeft) * GetPredictionForRow(t, columnCache, node.RightIdx, row);
     180        } else if (columnCache[nodeIdx][row] <= node.Val)
    112181          nodeIdx = node.LeftIdx;
    113182        else
     
    121190    }
    122191
    123     public IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
     192    public override IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
    124193      // lookup columns for variableNames in one pass over the tree to speed up evaluation later on
    125194      ReadOnlyCollection<double>[] columnCache = new ReadOnlyCollection<double>[tree.Length];
     
    127196      for (int i = 0; i < tree.Length; i++) {
    128197        if (tree[i].VarName != TreeNode.NO_VARIABLE) {
    129           columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
     198          // tree models also support calculating estimations if not all variables used for training are available in the dataset
     199          if (ds.ColumnNames.Contains(tree[i].VarName))
     200            columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
    130201        }
    131202      }
     
    133204    }
    134205
    135     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     206    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    136207      return new RegressionSolution(this, new RegressionProblemData(problemData));
    137208    }
     
    148219      } else {
    149220        return
    150           TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F}", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val))
    151         + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} >  {3:F}", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val));
    152       }
    153     }
     221          TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, n.WeightLeft))
     222        + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2}  >  {3:F} ({4:N3}))", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, 1.0 - n.WeightLeft));
     223      }
     224    }
     225
    154226  }
    155227}
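
GetPredictionForRow above now handles the case where a split variable is missing from the dataset (columnCache[nodeIdx] == null): instead of following one branch it descends both children and blends their predictions by WeightLeft, the fraction of training samples that went left. That is the traversal the partial dependence plots rely on. A compact recursive sketch, relying only on the public TreeNode members shown in the diff (lookup is an illustrative stand-in for the column cache; a null result means the variable is held out):

    using System;

    static class PartialDependenceSketch {
      public static double Predict(RegressionTreeModel.TreeNode[] tree, int nodeIdx, Func<string, double?> lookup) {
        var node = tree[nodeIdx];
        if (node.VarName == RegressionTreeModel.TreeNode.NO_VARIABLE) return node.Val;  // leaf: constant prediction
        double? x = lookup(node.VarName);
        if (x == null)                                   // split variable held out: blend both sub-trees
          return node.WeightLeft * Predict(tree, node.LeftIdx, lookup)
               + (1.0 - node.WeightLeft) * Predict(tree, node.RightIdx, lookup);
        return x.Value <= node.Val ? Predict(tree, node.LeftIdx, lookup)
                                   : Predict(tree, node.RightIdx, lookup);
      }
    }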
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs

    r12509 r13948  
    111111      IClassificationProblemData problemData,
    112112      IEnumerable<int> rows) {
    113       var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter, new AccuracyMaximizationThresholdCalculator());
     113      var model = new SymbolicDiscriminantFunctionClassificationModel(problemData.TargetVariable, tree, interpreter, new AccuracyMaximizationThresholdCalculator());
    114114      model.RecalculateModelParameters(problemData, rows);
    115115      return model;
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearRegression.cs

    r13238 r13948  
    110110      addition.AddSubtree(cNode);
    111111
    112       SymbolicRegressionSolution solution = new SymbolicRegressionSolution(new SymbolicRegressionModel(tree, new SymbolicDataAnalysisExpressionTreeInterpreter()), (IRegressionProblemData)problemData.Clone());
     112      SymbolicRegressionSolution solution = new SymbolicRegressionSolution(new SymbolicRegressionModel(problemData.TargetVariable, tree, new SymbolicDataAnalysisExpressionTreeInterpreter()), (IRegressionProblemData)problemData.Clone());
    113113      solution.Model.Name = "Linear Regression Model";
    114114      solution.Name = "Linear Regression Solution";
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassification.cs

    r13238 r13948  
    9595      relClassError = alglib.mnlrelclserror(lm, inputMatrix, nRows);
    9696
    97       MultinomialLogitClassificationSolution solution = new MultinomialLogitClassificationSolution((IClassificationProblemData)problemData.Clone(), new MultinomialLogitModel(lm, targetVariable, allowedInputVariables, classValues));
     97      MultinomialLogitClassificationSolution solution = new MultinomialLogitClassificationSolution(new MultinomialLogitModel(lm, targetVariable, allowedInputVariables, classValues), (IClassificationProblemData)problemData.Clone());
    9898      return solution;
    9999    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassificationSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public MultinomialLogitClassificationSolution(IClassificationProblemData problemData, MultinomialLogitModel logitModel)
     45    public MultinomialLogitClassificationSolution(MultinomialLogitModel logitModel, IClassificationProblemData problemData)
    4646      : base(logitModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs

    r12509 r13948  
    3434  [StorableClass]
    3535  [Item("Multinomial Logit Model", "Represents a multinomial logit model for classification.")]
    36   public sealed class MultinomialLogitModel : NamedItem, IClassificationModel {
     36  public sealed class MultinomialLogitModel : ClassificationModel {
    3737
    3838    private alglib.logitmodel logitModel;
     
    4848    }
    4949
    50     [Storable]
    51     private string targetVariable;
     50    public override IEnumerable<string> VariablesUsedForPrediction {
     51      get { return allowedInputVariables; }
     52    }
     53
    5254    [Storable]
    5355    private string[] allowedInputVariables;
     
    6466      logitModel = new alglib.logitmodel();
    6567      logitModel.innerobj.w = (double[])original.logitModel.innerobj.w.Clone();
    66       targetVariable = original.targetVariable;
    6768      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
    6869      classValues = (double[])original.classValues.Clone();
    6970    }
    7071    public MultinomialLogitModel(alglib.logitmodel logitModel, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues)
    71       : base() {
     72      : base(targetVariable) {
    7273      this.name = ItemName;
    7374      this.description = ItemDescription;
    7475      this.logitModel = logitModel;
    75       this.targetVariable = targetVariable;
    7676      this.allowedInputVariables = allowedInputVariables.ToArray();
    7777      this.classValues = (double[])classValues.Clone();
     
    8282    }
    8383
    84     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     84    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    8585      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    8686
     
    108108    }
    109109
    110     public MultinomialLogitClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    111       return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this);
    112     }
    113     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    114       return CreateClassificationSolution(problemData);
     110    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     111      return new MultinomialLogitClassificationSolution(this, new ClassificationProblemData(problemData));
    115112    }
    116113
     
    135132    }
    136133    #endregion
     134
    137135  }
    138136}
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/MctsSymbolicRegression/MctsSymbolicRegressionStatic.cs

    r13669 r13948  
    2525using System.Linq;
    2626using HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression.Policies;
    27 using HeuristicLab.Common;
    2827using HeuristicLab.Core;
    2928using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
     
    177176
    178177          var t = new SymbolicExpressionTree(treeGen.Exec(bestCode, bestConsts, bestNParams, scalingFactor, scalingOffset));
    179           var model = new SymbolicRegressionModel(t, interpreter, lowerEstimationLimit, upperEstimationLimit);
     178          var model = new SymbolicRegressionModel(problemData.TargetVariable, t, interpreter, lowerEstimationLimit, upperEstimationLimit);
    180179
    181180          // model has already been scaled linearly in Eval
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaClassificationSolution.cs

    r12012 r13948  
    4040      : base(original, cloner) {
    4141    }
    42     public NcaClassificationSolution(IClassificationProblemData problemData, INcaModel ncaModel)
     42    public NcaClassificationSolution(INcaModel ncaModel, IClassificationProblemData problemData)
    4343      : base(ncaModel, problemData) {
    4444    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaModel.cs

    r12509 r13948  
    3030  [Item("NCA Model", "")]
    3131  [StorableClass]
    32   public class NcaModel : NamedItem, INcaModel {
     32  public class NcaModel : ClassificationModel, INcaModel {
     33    public override IEnumerable<string> VariablesUsedForPrediction {
     34      get { return allowedInputVariables; }
     35    }
    3336
    3437    [Storable]
     
    3942    [Storable]
    4043    private string[] allowedInputVariables;
    41     [Storable]
    42     private string targetVariable;
    4344    [Storable]
    4445    private INearestNeighbourModel nnModel;
     
    5253      this.transformationMatrix = (double[,])original.transformationMatrix.Clone();
    5354      this.allowedInputVariables = (string[])original.allowedInputVariables.Clone();
    54       this.targetVariable = original.targetVariable;
    5555      this.nnModel = cloner.Clone(original.nnModel);
    5656      this.classValues = (double[])original.classValues.Clone();
    5757    }
    58     public NcaModel(int k, double[,] transformationMatrix, IDataset dataset, IEnumerable<int> rows, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues) {
     58    public NcaModel(int k, double[,] transformationMatrix, IDataset dataset, IEnumerable<int> rows, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues)
     59      : base(targetVariable) {
    5960      Name = ItemName;
    6061      Description = ItemDescription;
    6162      this.transformationMatrix = (double[,])transformationMatrix.Clone();
    6263      this.allowedInputVariables = allowedInputVariables.ToArray();
    63       this.targetVariable = targetVariable;
    6464      this.classValues = (double[])classValues.Clone();
    6565
     
    7272    }
    7373
    74     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     74    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    7575      var ds = ReduceDataset(dataset, rows);
    7676      return nnModel.GetEstimatedClassValues(ds, Enumerable.Range(0, ds.Rows));
    7777    }
    7878
    79     public INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    80       return new NcaClassificationSolution(new ClassificationProblemData(problemData), this);
     79    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     80      return new NcaClassificationSolution(this, new ClassificationProblemData(problemData));
    8181    }
    8282
    83     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    84       return CreateClassificationSolution(problemData);
     83    INcaClassificationSolution INcaModel.CreateClassificationSolution(IClassificationProblemData problemData) {
     84      return new NcaClassificationSolution(this, new ClassificationProblemData(problemData));
    8585    }
    8686
     
    8888      var data = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    8989
    90       var targets = dataset.GetDoubleValues(targetVariable, rows).ToArray();
     90      var targets = dataset.GetDoubleValues(TargetVariable, rows).ToArray();
    9191      var result = new double[data.GetLength(0), transformationMatrix.GetLength(1) + 1];
    9292      for (int i = 0; i < data.GetLength(0); i++)
     
    104104          .Range(0, transformationMatrix.GetLength(1))
    105105          .Select(x => "X" + x.ToString())
    106           .Concat(targetVariable.ToEnumerable()),
     106          .Concat(TargetVariable.ToEnumerable()),
    107107        Reduce(dataset, rows));
    108108    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassification.cs

    r13238 r13948  
    8181    public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k) {
    8282      var problemDataClone = (IClassificationProblemData)problemData.Clone();
    83       return new NearestNeighbourClassificationSolution(problemDataClone, Train(problemDataClone, k));
     83      return new NearestNeighbourClassificationSolution(Train(problemDataClone, k), problemDataClone);
    8484    }
    8585
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassificationSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NearestNeighbourClassificationSolution(IClassificationProblemData problemData, INearestNeighbourModel nnModel)
     45    public NearestNeighbourClassificationSolution(INearestNeighbourModel nnModel, IClassificationProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r12509 r13948  
    3434  [StorableClass]
    3535  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
    36   public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {
     36  public sealed class NearestNeighbourModel : ClassificationModel, INearestNeighbourModel {
    3737
    3838    private alglib.nearestneighbor.kdtree kdTree;
     
    4848    }
    4949
    50     [Storable]
    51     private string targetVariable;
     50    public override IEnumerable<string> VariablesUsedForPrediction {
     51      get { return allowedInputVariables; }
     52    }
     53
    5254    [Storable]
    5355    private string[] allowedInputVariables;
     
    9193
    9294      k = original.k;
    93       targetVariable = original.targetVariable;
    9495      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
    9596      if (original.classValues != null)
    9697        this.classValues = (double[])original.classValues.Clone();
    9798    }
    98     public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
     99    public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
     100      : base(targetVariable) {
    99101      Name = ItemName;
    100102      Description = ItemDescription;
    101103      this.k = k;
    102       this.targetVariable = targetVariable;
    103104      this.allowedInputVariables = allowedInputVariables.ToArray();
    104105
     
    163164    }
    164165
    165     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     166    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    166167      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
    167168      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
     
    201202    }
    202203
    203     public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    204       return new NearestNeighbourRegressionSolution(new RegressionProblemData(problemData), this);
    205     }
     204
    206205    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    207       return CreateRegressionSolution(problemData);
    208     }
    209     public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    210       return new NearestNeighbourClassificationSolution(new ClassificationProblemData(problemData), this);
    211     }
    212     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    213       return CreateClassificationSolution(problemData);
     206      return new NearestNeighbourRegressionSolution(this, new RegressionProblemData(problemData));
     207    }
     208    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     209      return new NearestNeighbourClassificationSolution(this, new ClassificationProblemData(problemData));
    214210    }
    215211
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs

    r13238 r13948  
    8080    public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k) {
    8181      var clonedProblemData = (IRegressionProblemData)problemData.Clone();
    82       return new NearestNeighbourRegressionSolution(clonedProblemData, Train(problemData, k));
     82      return new NearestNeighbourRegressionSolution(Train(problemData, k), clonedProblemData);
    8383    }
    8484
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegressionSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NearestNeighbourRegressionSolution(IRegressionProblemData problemData, INearestNeighbourModel nnModel)
     45    public NearestNeighbourRegressionSolution(INearestNeighbourModel nnModel, IRegressionProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs

    r13238 r13948  
    220220
    221221      var problemDataClone = (IClassificationProblemData)problemData.Clone();
    222       return new NeuralNetworkClassificationSolution(problemDataClone, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()));
     222      return new NeuralNetworkClassificationSolution(new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()), problemDataClone);
    223223    }
    224224    #endregion
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassificationSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NeuralNetworkClassificationSolution(IClassificationProblemData problemData, INeuralNetworkModel nnModel)
     45    public NeuralNetworkClassificationSolution(INeuralNetworkModel nnModel, IClassificationProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassification.cs

    r13238 r13948  
    204204      relClassError = alglib.mlperelclserror(mlpEnsemble, inputMatrix, nRows);
    205205      var problemDataClone = (IClassificationProblemData)problemData.Clone();
    206       return new NeuralNetworkEnsembleClassificationSolution(problemDataClone, new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()));
     206      return new NeuralNetworkEnsembleClassificationSolution(new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()), problemDataClone);
    207207    }
    208208    #endregion
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassificationSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, INeuralNetworkEnsembleModel nnModel)
     45    public NeuralNetworkEnsembleClassificationSolution(INeuralNetworkEnsembleModel nnModel, IClassificationProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs

    r12509 r13948  
    3434  [StorableClass]
    3535  [Item("NeuralNetworkEnsembleModel", "Represents a neural network ensemble for regression and classification.")]
    36   public sealed class NeuralNetworkEnsembleModel : NamedItem, INeuralNetworkEnsembleModel {
     36  public sealed class NeuralNetworkEnsembleModel : ClassificationModel, INeuralNetworkEnsembleModel {
    3737
    3838    private alglib.mlpensemble mlpEnsemble;
     
    4646        }
    4747      }
     48    }
     49
     50    public override IEnumerable<string> VariablesUsedForPrediction {
     51      get { return allowedInputVariables; }
    4852    }
    4953
     
    7276    }
    7377    public NeuralNetworkEnsembleModel(alglib.mlpensemble mlpEnsemble, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    74       : base() {
     78      : base(targetVariable) {
    7579      this.name = ItemName;
    7680      this.description = ItemDescription;
     
    103107    }
    104108
    105     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     109    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    106110      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    107111
     
    129133    }
    130134
    131     public INeuralNetworkEnsembleRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    132       return new NeuralNetworkEnsembleRegressionSolution(new RegressionEnsembleProblemData(problemData), this);
    133     }
    134     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    135       return CreateRegressionSolution(problemData);
    136     }
    137     public INeuralNetworkEnsembleClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    138       return new NeuralNetworkEnsembleClassificationSolution(new ClassificationEnsembleProblemData(problemData), this);
    139     }
    140     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    141       return CreateClassificationSolution(problemData);
     135    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     136      return new NeuralNetworkEnsembleRegressionSolution(this, new RegressionEnsembleProblemData(problemData));
     137    }
     138    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     139      return new NeuralNetworkEnsembleClassificationSolution(this, new ClassificationEnsembleProblemData(problemData));
    142140    }
    143141
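
    With NeuralNetworkEnsembleModel now deriving from ClassificationModel, the target variable goes to the base constructor and the used inputs are reported through VariablesUsedForPrediction. A minimal consumption sketch, assuming the new base class exposes TargetVariable publicly (as the SupportVectorMachineModel hunk further down suggests); class, method, and variable names are illustrative:

    using System;
    using System.Collections.Generic;
    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class EnsembleModelInspection {
      // Print what the ensemble predicts, which inputs it needs, and its class estimates for the given rows.
      public static void Describe(NeuralNetworkEnsembleModel model, IDataset dataset, IEnumerable<int> rows) {
        Console.WriteLine("Target variable: " + model.TargetVariable);
        Console.WriteLine("Inputs used: " + string.Join(", ", model.VariablesUsedForPrediction));
        foreach (double estimate in model.GetEstimatedClassValues(dataset, rows))
          Console.WriteLine(estimate);
      }
    }
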
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs

    r13238 r13948  
    190190      avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);
    191191
    192       return new NeuralNetworkEnsembleRegressionSolution((IRegressionProblemData)problemData.Clone(), new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables));
     192      return new NeuralNetworkEnsembleRegressionSolution(new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables), (IRegressionProblemData)problemData.Clone());
    193193    }
    194194    #endregion
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegressionSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, INeuralNetworkEnsembleModel nnModel)
     45    public NeuralNetworkEnsembleRegressionSolution(INeuralNetworkEnsembleModel nnModel, IRegressionProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747      RecalculateResults();
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r12817 r13948  
    3434  [StorableClass]
    3535  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
    36   public sealed class NeuralNetworkModel : NamedItem, INeuralNetworkModel {
     36  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {
    3737
    3838    private alglib.multilayerperceptron multiLayerPerceptron;
     
    4848    }
    4949
    50     [Storable]
    51     private string targetVariable;
     50    public override IEnumerable<string> VariablesUsedForPrediction {
     51      get { return allowedInputVariables; }
     52    }
     53
    5254    [Storable]
    5355    private string[] allowedInputVariables;
     
    7476      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
    7577      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
    76       targetVariable = original.targetVariable;
    7778      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
    7879      if (original.classValues != null)
     
    8081    }
    8182    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    82       : base() {
     83      : base(targetVariable) {
    8384      this.name = ItemName;
    8485      this.description = ItemDescription;
    8586      this.multiLayerPerceptron = multiLayerPerceptron;
    86       this.targetVariable = targetVariable;
    8787      this.allowedInputVariables = allowedInputVariables.ToArray();
    8888      if (classValues != null)
     
    111111    }
    112112
    113     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     113    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    114114      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    115115
     
    137137    }
    138138
    139     public INeuralNetworkRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    140       return new NeuralNetworkRegressionSolution(new RegressionProblemData(problemData), this);
    141     }
    142     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    143       return CreateRegressionSolution(problemData);
    144     }
    145     public INeuralNetworkClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    146       return new NeuralNetworkClassificationSolution(new ClassificationProblemData(problemData), this);
    147     }
    148     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    149       return CreateClassificationSolution(problemData);
     139    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     140      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
     141    }
     142    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     143      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    150144    }
    151145
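
    NeuralNetworkModel serves regression and classification alike; after this change both factory methods are plain public members that return the interface types (the explicit interface implementations were dropped). An illustrative sketch with hypothetical wrapper names:

    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class NeuralNetworkSolutionFactory {
      // One trained network can be wrapped either way; each call copies the given
      // problem data into a fresh RegressionProblemData/ClassificationProblemData internally.
      public static IRegressionSolution AsRegression(NeuralNetworkModel model, IRegressionProblemData problemData) {
        return model.CreateRegressionSolution(problemData);
      }
      public static IClassificationSolution AsClassification(NeuralNetworkModel model, IClassificationProblemData problemData) {
        return model.CreateClassificationSolution(problemData);
      }
    }
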
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs

    r13238 r13948  
    207207      avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);
    208208
    209       return new NeuralNetworkRegressionSolution((IRegressionProblemData)problemData.Clone(), new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));
     209      return new NeuralNetworkRegressionSolution(new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables), (IRegressionProblemData)problemData.Clone());
    210210    }
    211211    #endregion
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegressionSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public NeuralNetworkRegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)
     45    public NeuralNetworkRegressionSolution(INeuralNetworkModel nnModel, IRegressionProblemData problemData)
    4646      : base(nnModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassification.cs

    r13238 r13948  
    143143
    144144      if (CreateSolution) {
    145         var solution = new RandomForestClassificationSolution((IClassificationProblemData)Problem.ProblemData.Clone(), model);
     145        var solution = new RandomForestClassificationSolution(model, (IClassificationProblemData)Problem.ProblemData.Clone());
    146146        Results.Add(new Result(RandomForestClassificationModelResultName, "The random forest classification solution.", solution));
    147147      }
    148148    }
    149    
     149
    150150    // keep for compatibility with old API
    151151    public static RandomForestClassificationSolution CreateRandomForestClassificationSolution(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
    152152      out double rmsError, out double relClassificationError, out double outOfBagRmsError, out double outOfBagRelClassificationError) {
    153153      var model = CreateRandomForestClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
    154       return new RandomForestClassificationSolution((IClassificationProblemData)problemData.Clone(), model);
     154      return new RandomForestClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
    155155    }
    156156
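
    The static convenience API of RandomForestClassification is kept for compatibility; only the solution constructor call inside it changed, so existing callers compile unchanged. A hedged usage sketch; the numeric settings (nTrees = 50, r = 0.3, m = 0.5, seed = 0) and the wrapper class are illustrative:

    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class RandomForestCompatibilityExample {
      public static RandomForestClassificationSolution Train(IClassificationProblemData problemData) {
        double rmsError, relClassificationError, outOfBagRmsError, outOfBagRelClassificationError;
        // Same parameter order as in the method shown above; the error metrics come back via out parameters.
        return RandomForestClassification.CreateRandomForestClassificationSolution(
          problemData, 50, 0.3, 0.5, 0,
          out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
      }
    }
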
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassificationSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public RandomForestClassificationSolution(IClassificationProblemData problemData, IRandomForestModel randomForestModel)
     45    public RandomForestClassificationSolution(IRandomForestModel randomForestModel, IClassificationProblemData problemData)
    4646      : base(randomForestModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs

    r12509 r13948  
    3434  [StorableClass]
    3535  [Item("RandomForestModel", "Represents a random forest for regression and classification.")]
    36   public sealed class RandomForestModel : NamedItem, IRandomForestModel {
     36  public sealed class RandomForestModel : ClassificationModel, IRandomForestModel {
    3737    // not persisted
    3838    private alglib.decisionforest randomForest;
     
    4444      }
    4545    }
     46
     47    public override IEnumerable<string> VariablesUsedForPrediction {
     48      get { return originalTrainingData.AllowedInputVariables; }
     49    }
     50
    4651
    4752    // instead of storing the data of the model itself
     
    9196
    9297    // random forest models can only be created through the static factory methods CreateRegressionModel and CreateClassificationModel
    93     private RandomForestModel(alglib.decisionforest randomForest,
     98    private RandomForestModel(string targetVariable, alglib.decisionforest randomForest,
    9499      int seed, IDataAnalysisProblemData originalTrainingData,
    95100      int nTrees, double r, double m, double[] classValues = null)
    96       : base() {
     101      : base(targetVariable) {
    97102      this.name = ItemName;
    98103      this.description = ItemDescription;
     
    147152    }
    148153
    149     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     154    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    150155      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows);
    151156      AssertInputMatrix(inputData);
     
    174179    }
    175180
    176     public IRandomForestRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    177       return new RandomForestRegressionSolution(new RegressionProblemData(problemData), this);
    178     }
    179     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    180       return CreateRegressionSolution(problemData);
    181     }
    182     public IRandomForestClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    183       return new RandomForestClassificationSolution(new ClassificationProblemData(problemData), this);
    184     }
    185     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    186       return CreateClassificationSolution(problemData);
     181
     182    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     183      return new RandomForestRegressionSolution(this, new RegressionProblemData(problemData));
     184    }
     185    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     186      return new RandomForestClassificationSolution(this, new ClassificationProblemData(problemData));
    187187    }
    188188
     
    205205      outOfBagRmsError = rep.oobrmserror;
    206206
    207       return new RandomForestModel(dForest, seed, problemData, nTrees, r, m);
     207      return new RandomForestModel(problemData.TargetVariable, dForest, seed, problemData, nTrees, r, m);
    208208    }
    209209
     
    242242      outOfBagRelClassificationError = rep.oobrelclserror;
    243243
    244       return new RandomForestModel(dForest, seed, problemData, nTrees, r, m, classValues);
     244      return new RandomForestModel(problemData.TargetVariable, dForest, seed, problemData, nTrees, r, m, classValues);
    245245    }
    246246
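
    RandomForestModel keeps a reference to its original training data and now reports that data's allowed inputs through VariablesUsedForPrediction, while both factory paths hand problemData.TargetVariable to the private constructor. A small sketch of how downstream code can use the reported inputs, assuming IDataset exposes VariableNames (the wrapper class is illustrative):

    using System.Collections.Generic;
    using System.Linq;
    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class ModelCompatibilityCheck {
      // True if the dataset provides every input variable the forest was trained on.
      public static bool CanPredict(RandomForestModel model, IDataset dataset) {
        return model.VariablesUsedForPrediction.All(name => dataset.VariableNames.Contains(name));
      }
    }
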
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestRegression.cs

    r13238 r13948  
    143143
    144144      if (CreateSolution) {
    145         var solution = new RandomForestRegressionSolution((IRegressionProblemData)Problem.ProblemData.Clone(), model);
     145        var solution = new RandomForestRegressionSolution(model, (IRegressionProblemData)Problem.ProblemData.Clone());
    146146        Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
    147147      }
     
    153153      var model = CreateRandomForestRegressionModel(problemData, nTrees, r, m, seed,
    154154        out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
    155       return new RandomForestRegressionSolution((IRegressionProblemData)problemData.Clone(), model);
     155      return new RandomForestRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
    156156    }
    157157
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestRegressionSolution.cs

    r12012 r13948  
    4343      : base(original, cloner) {
    4444    }
    45     public RandomForestRegressionSolution(IRegressionProblemData problemData, IRandomForestModel randomForestModel)
     45    public RandomForestRegressionSolution(IRandomForestModel randomForestModel, IRegressionProblemData problemData)
    4646      : base(randomForestModel, problemData) {
    4747    }
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineModel.cs

    r12509 r13948  
    3737  [StorableClass]
    3838  [Item("SupportVectorMachineModel", "Represents a support vector machine model.")]
    39   public sealed class SupportVectorMachineModel : NamedItem, ISupportVectorMachineModel {
     39  public sealed class SupportVectorMachineModel : ClassificationModel, ISupportVectorMachineModel {
     40    public override IEnumerable<string> VariablesUsedForPrediction {
     41      get { return allowedInputVariables; }
     42    }
     43
    4044
    4145    private svm_model model;
     
    8387
    8488    [Storable]
    85     private string targetVariable;
    86     [Storable]
    8789    private string[] allowedInputVariables;
    8890    [Storable]
     
    9698      this.model = original.model;
    9799      this.rangeTransform = original.rangeTransform;
    98       this.targetVariable = original.targetVariable;
    99100      this.allowedInputVariables = (string[])original.allowedInputVariables.Clone();
    100101      if (original.classValues != null)
     
    106107    }
    107108    public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)
    108       : base() {
     109      : base(targetVariable) {
    109110      this.name = ItemName;
    110111      this.description = ItemDescription;
    111112      this.model = model;
    112113      this.rangeTransform = rangeTransform;
    113       this.targetVariable = targetVariable;
    114114      this.allowedInputVariables = allowedInputVariables.ToArray();
    115115    }
     
    123123      return GetEstimatedValuesHelper(dataset, rows);
    124124    }
    125     public SupportVectorRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     125    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    126126      return new SupportVectorRegressionSolution(this, new RegressionProblemData(problemData));
    127127    }
    128     IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    129       return CreateRegressionSolution(problemData);
    130     }
    131128    #endregion
    132129
    133130    #region IClassificationModel Members
    134     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
     131    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    135132      if (classValues == null) throw new NotSupportedException();
    136133      // return the original class value instead of the predicted value of the model
     
    152149    }
    153150
    154     public SupportVectorClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
     151    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    155152      return new SupportVectorClassificationSolution(this, new ClassificationProblemData(problemData));
    156     }
    157     IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
    158       return CreateClassificationSolution(problemData);
    159153    }
    160154    #endregion
    161155    private IEnumerable<double> GetEstimatedValuesHelper(IDataset dataset, IEnumerable<int> rows) {
    162156      // calculate predictions for the currently requested rows
    163       svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     157      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, TargetVariable, allowedInputVariables, rows);
    164158      svm_problem scaledProblem = rangeTransform.Scale(problem);
    165159
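
    SupportVectorMachineModel drops its private targetVariable field in favour of the TargetVariable property inherited from ClassificationModel, and its two factory methods now return interface types instead of the concrete solution classes. A hedged sketch of a caller that uses the inherited property as a sanity check (the guard itself is illustrative, not part of the changeset):

    using System;
    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class SvmSolutionFactory {
      public static IRegressionSolution BuildRegression(SupportVectorMachineModel model, IRegressionProblemData problemData) {
        // The model's target now comes from the ClassificationModel base; make sure it matches the problem data.
        if (model.TargetVariable != problemData.TargetVariable)
          throw new ArgumentException("Problem data targets a different variable than the model.");
        return model.CreateRegressionSolution(problemData); // returns IRegressionSolution since r13948
      }
    }
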
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/TimeSeries/AutoregressiveModeling.cs

    r13238 r13948  
    134134
    135135      var interpreter = new SymbolicTimeSeriesPrognosisExpressionTreeInterpreter(problemData.TargetVariable);
    136       var model = new SymbolicTimeSeriesPrognosisModel(tree, interpreter);
     136      var model = new SymbolicTimeSeriesPrognosisModel(problemData.TargetVariable, tree, interpreter);
    137137      var solution = model.CreateTimeSeriesPrognosisSolution((ITimeSeriesPrognosisProblemData)problemData.Clone());
    138138      return solution;
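
    The time-series path follows the same scheme: the prognosis model now receives the target variable as its first constructor argument. A minimal sketch mirroring the call above; the namespace imports and the ISymbolicExpressionTree parameter type are assumptions based on the usual HeuristicLab plugin layout:

    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    using HeuristicLab.Problems.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis;

    public static class AutoregressiveModelFactory {
      public static SymbolicTimeSeriesPrognosisModel BuildModel(ISymbolicExpressionTree tree, ITimeSeriesPrognosisProblemData problemData) {
        var interpreter = new SymbolicTimeSeriesPrognosisExpressionTreeInterpreter(problemData.TargetVariable);
        // Since r13948 the target variable is also the first constructor argument of the model itself.
        return new SymbolicTimeSeriesPrognosisModel(problemData.TargetVariable, tree, interpreter);
      }
    }
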
  • branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/kMeans/KMeansClusteringModel.cs

    r12509 r13948  
    3737    public static new Image StaticItemImage {
    3838      get { return HeuristicLab.Common.Resources.VSImageLibrary.Function; }
     39    }
     40
     41    public IEnumerable<string> VariablesUsedForPrediction {
     42      get { return allowedInputVariables; }
    3943    }
    4044