Timestamp: 07/08/14 19:03:36 (10 years ago)
Author: mkommend
Message:

#1758: Merged r10173:10176 and r10540, r10541, r10543, r10545 and r11031 into stable.

Location: stable
Files: 8 edited

Legend:

  (unprefixed)  Unmodified
  +             Added
  -             Removed
  • stable

  • stable/HeuristicLab.Problems.DataAnalysis

  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationProblemData.cs

    r9456 → r11144

          public string TargetVariable {
            get { return TargetVariableParameter.Value.Value; }
    +       set {
    +         if (value == null) throw new ArgumentNullException("targetVariable", "The provided value for the targetVariable is null.");
    +         if (value == TargetVariable) return;
    +
    +         var matchingParameterValue = TargetVariableParameter.ValidValues.FirstOrDefault(v => v.Value == value);
    +         if (matchingParameterValue == null) throw new ArgumentException("The provided value is not valid as the targetVariable.", "targetVariable");
    +         TargetVariableParameter.Value = matchingParameterValue;
    +       }
          }

    [...]

          }
          #endregion
    +
    +     protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +       IClassificationProblemData classificationProblemData = problemData as IClassificationProblemData;
    +       if (classificationProblemData == null)
    +         throw new ArgumentException("The problem data is no classification problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       var returnValue = base.IsProblemDataCompatible(classificationProblemData, out errorMessage);
    +       //check targetVariable
    +       if (classificationProblemData.InputVariables.All(var => var.Value != TargetVariable)) {
    +         errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
    +                        + Environment.NewLine + errorMessage;
    +         return false;
    +       }
    +
    +       var newClassValues = classificationProblemData.Dataset.GetDoubleValues(TargetVariable).Distinct().OrderBy(x => x);
    +       if (!newClassValues.SequenceEqual(ClassValues)) {
    +         errorMessage = errorMessage + string.Format("The class values differ in the provided classification problem data.");
    +         return false;
    +       }
    +
    +       return returnValue;
    +     }
    +
    +     public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +       ClassificationProblemData classificationProblemData = problemData as ClassificationProblemData;
    +       if (classificationProblemData == null)
    +         throw new ArgumentException("The problem data is not a classification problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       base.AdjustProblemDataProperties(problemData);
    +       TargetVariable = classificationProblemData.TargetVariable;
    +       for (int i = 0; i < classificationProblemData.ClassNames.Count(); i++)
    +         ClassNamesParameter.Value[i, 0] = classificationProblemData.ClassNames.ElementAt(i);
    +
    +       for (int i = 0; i < Classes; i++) {
    +         for (int j = 0; j < Classes; j++) {
    +           ClassificationPenaltiesParameter.Value[i, j] = classificationProblemData.GetClassificationPenalty(ClassValuesCache[i], ClassValuesCache[j]);
    +         }
    +       }
    +     }
        }
      }
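
    The public entry point introduced in this file is AdjustProblemDataProperties; the compatibility checks above surface to the caller as exceptions. The fragment below is only a usage sketch, not part of the changeset: it assumes two already-constructed problem data objects (current and incoming) and shows which exception types the new code paths raise.

        using System;
        using HeuristicLab.Problems.DataAnalysis;

        public static class AdjustProblemDataExample {
          // 'current' and 'incoming' are assumed to exist already; how they are built
          // (dataset loading, constructors) is outside the scope of this changeset.
          public static void Apply(ClassificationProblemData current, IClassificationProblemData incoming) {
            try {
              // Copies the target variable, class names and penalties from 'incoming'
              // once the compatibility checks added in r11144 have passed.
              current.AdjustProblemDataProperties(incoming);
            } catch (InvalidOperationException ex) {
              // Raised by the base class when IsProblemDataCompatible reports missing
              // input variables, a missing target variable or differing class values.
              Console.WriteLine("Problem data could not be adjusted: " + ex.Message);
            } catch (ArgumentException ex) {
              // Raised when the argument is null or not a classification problem data
              // (ArgumentNullException derives from ArgumentException).
              Console.WriteLine("Invalid problem data: " + ex.Message);
            }
          }
        }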
  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/DataAnalysisProblemData.cs

    r9456 → r11144

      using System.Collections.Generic;
      using System.Linq;
    + using System.Text;
      using HeuristicLab.Collections;
      using HeuristicLab.Common;

    [...]

            if (listeners != null) listeners(this, EventArgs.Empty);
          }
    +
    +     protected virtual bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
    +       errorMessage = string.Empty;
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +
    +       //check allowed input variables
    +       StringBuilder message = new StringBuilder();
    +       var variables = new HashSet<string>(problemData.InputVariables.Select(x => x.Value));
    +       foreach (var item in AllowedInputVariables) {
    +         if (!variables.Contains(item))
    +           message.AppendLine("Input variable '" + item + "' is not present in the new problem data.");
    +       }
    +
    +       if (message.Length != 0) {
    +         errorMessage = message.ToString();
    +         return false;
    +       }
    +       return true;
    +
    +     }
    +
    +     public virtual void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
    +       DataAnalysisProblemData data = problemData as DataAnalysisProblemData;
    +       if (data == null) throw new ArgumentException("The problem data is not a data analysis problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       string errorMessage;
    +       if (!data.IsProblemDataCompatible(this, out errorMessage)) {
    +         throw new InvalidOperationException(errorMessage);
    +       }
    +
    +       foreach (var inputVariable in InputVariables) {
    +         var variable = data.InputVariables.FirstOrDefault(i => i.Value == inputVariable.Value);
    +         InputVariables.SetItemCheckedState(inputVariable, variable != null && data.InputVariables.ItemChecked(variable));
    +       }
    +
    +       TrainingPartition.Start = TrainingPartition.End = 0;
    +       TestPartition.Start = 0;
    +       TestPartition.End = Dataset.Rows;
    +     }
        }
      }
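
    The base-class check above only verifies that every allowed input variable of the current problem is still present in the new data. As a self-contained illustration of that rule (working on plain strings rather than the HeuristicLab types), the same logic can be sketched like this:

        using System.Collections.Generic;
        using System.Text;

        public static class CompatibilityRuleSketch {
          // Mirrors the HashSet/StringBuilder check added to IsProblemDataCompatible,
          // but takes plain variable names instead of IDataAnalysisProblemData.
          public static bool AllInputsPresent(IEnumerable<string> allowedInputVariables,
                                              IEnumerable<string> newInputVariables,
                                              out string errorMessage) {
            errorMessage = string.Empty;
            var variables = new HashSet<string>(newInputVariables);
            var message = new StringBuilder();
            foreach (var item in allowedInputVariables) {
              if (!variables.Contains(item))
                message.AppendLine("Input variable '" + item + "' is not present in the new problem data.");
            }
            if (message.Length != 0) {
              errorMessage = message.ToString();
              return false;   // at least one allowed input variable is missing
            }
            return true;
          }
        }

    Note that AdjustProblemDataProperties itself then resets the training partition to be empty and lets the test partition span the whole dataset, so callers that need a specific split have to set the partitions again afterwards.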
  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionProblemData.cs

    r9456 → r11144

          public string TargetVariable {
            get { return TargetVariableParameter.Value.Value; }
    +       set {
    +         if (value == null) throw new ArgumentNullException("targetVariable", "The provided value for the targetVariable is null.");
    +         if (value == TargetVariable) return;
    +
    +         var matchingParameterValue = TargetVariableParameter.ValidValues.FirstOrDefault(v => v.Value == value);
    +         if (matchingParameterValue == null) throw new ArgumentException("The provided value is not valid as the targetVariable.", "targetVariable");
    +         TargetVariableParameter.Value = matchingParameterValue;
    +       }
          }

    [...]

            OnChanged();
          }
    +
    +     protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +       IRegressionProblemData regressionProblemData = problemData as IRegressionProblemData;
    +       if (regressionProblemData == null)
    +         throw new ArgumentException("The problem data is not a regression problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       var returnValue = base.IsProblemDataCompatible(problemData, out errorMessage);
    +       //check targetVariable
    +       if (problemData.InputVariables.All(var => var.Value != TargetVariable)) {
    +         errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
    +                        + Environment.NewLine + errorMessage;
    +         return false;
    +       }
    +       return returnValue;
    +     }
    +
    +     public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +       RegressionProblemData regressionProblemData = problemData as RegressionProblemData;
    +       if (regressionProblemData == null)
    +         throw new ArgumentException("The problem data is not a regression problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       base.AdjustProblemDataProperties(problemData);
    +       TargetVariable = regressionProblemData.TargetVariable;
    +     }
        }
      }
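
    TargetVariable gains a setter in this changeset (both here and in ClassificationProblemData) that only accepts values listed in TargetVariableParameter.ValidValues. The following is a hedged usage sketch, not part of the changeset, assuming an already-constructed RegressionProblemData instance:

        using System;
        using HeuristicLab.Problems.DataAnalysis;

        public static class TargetVariableExample {
          public static void SetTarget(RegressionProblemData problemData, string newTarget) {
            try {
              // Accepted only if 'newTarget' is among the parameter's ValidValues;
              // otherwise the new setter throws an ArgumentException.
              problemData.TargetVariable = newTarget;
            } catch (ArgumentNullException) {
              Console.WriteLine("The target variable must not be null.");
            } catch (ArgumentException) {
              Console.WriteLine("'" + newTarget + "' is not a valid target variable for this dataset.");
            }
          }
        }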
  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisProblemData.cs

    r9572 → r11144

          }

    +     protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
    +       if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
    +       ITimeSeriesPrognosisProblemData timeseriesProblemData = problemData as ITimeSeriesPrognosisProblemData;
    +       if (timeseriesProblemData == null)
    +         throw new ArgumentException("The problem data is not a time-series problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       var returnValue = base.IsProblemDataCompatible(problemData, out errorMessage);
    +       //check targetVariable
    +       if (problemData.InputVariables.All(var => var.Value != TargetVariable)) {
    +         errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
    +                        + Environment.NewLine + errorMessage;
    +         return false;
    +       }
    +       return returnValue;
    +     }
    +
    +     public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
    +       TimeSeriesPrognosisProblemData timeSeriesProblemData = problemData as TimeSeriesPrognosisProblemData;
    +       if (timeSeriesProblemData == null)
    +         throw new ArgumentException("The problem data is not a timeseries problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    +
    +       var trainingDataStart = TrainingIndices.First();
    +
    +       base.AdjustProblemDataProperties(problemData);
    +
    +       TestPartition.Start = trainingDataStart;
    +
    +       TrainingHorizon = timeSeriesProblemData.TrainingHorizon;
    +       TestHorizon = timeSeriesProblemData.TestHorizon;
    +     }
    +
        }
      }
  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisResults.cs

    r9456 → r11144

            OnlineCalculatorError errorState;
            var problemData = Solution.ProblemData;
    +       if (!problemData.TrainingIndices.Any()) return;
            var model = Solution.Model;
            //mean model

    [...]

            OnlineCalculatorError errorState;
            var problemData = Solution.ProblemData;
    +       if (!problemData.TestIndices.Any()) return;
            var model = Solution.Model;
    -       //mean model
    -       double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
    -       var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
    -
    -       //AR1 model
    -       double alpha, beta;
    -       IEnumerable<double> trainingStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
    -       OnlineLinearScalingParameterCalculator.Calculate(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Where(x => x > 0)), trainingStartValues, out alpha, out beta, out errorState);
    -       var AR1model = new TimeSeriesPrognosisAutoRegressiveModel(problemData.TargetVariable, new double[] { beta }, alpha);
    -
            var testHorizions = problemData.TestIndices.Select(r => Math.Min(testHorizon, problemData.TestPartition.End - r)).ToList();
            IEnumerable<IEnumerable<double>> testTargetValues = problemData.TestIndices.Zip(testHorizions, Enumerable.Range).Select(r => problemData.Dataset.GetDoubleValues(problemData.TargetVariable, r)).ToList();
            IEnumerable<IEnumerable<double>> testEstimatedValues = model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
            IEnumerable<double> testStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TestIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
    -       IEnumerable<IEnumerable<double>> testMeanModelPredictions = meanModel.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
    -       IEnumerable<IEnumerable<double>> testAR1ModelPredictions = AR1model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();

            IEnumerable<double> originalTestValues = testTargetValues.SelectMany(x => x).ToList();

    [...]

            PrognosisTestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testStartValues, testTargetValues, testEstimatedValues, out errorState);
            PrognosisTestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? PrognosisTestWeightedDirectionalSymmetry : 0.0;
    -       PrognosisTestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
    -       PrognosisTestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticAR1 : double.PositiveInfinity;
    -       PrognosisTestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
    -       PrognosisTestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticMean : double.PositiveInfinity;
    +
    +
    +       if (problemData.TrainingIndices.Any()) {
    +         //mean model
    +         double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
    +         var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
    +
    +         //AR1 model
    +         double alpha, beta;
    +         IEnumerable<double> trainingStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
    +         OnlineLinearScalingParameterCalculator.Calculate(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Where(x => x > 0)), trainingStartValues, out alpha, out beta, out errorState);
    +         var AR1model = new TimeSeriesPrognosisAutoRegressiveModel(problemData.TargetVariable, new double[] { beta }, alpha);
    +
    +         IEnumerable<IEnumerable<double>> testMeanModelPredictions = meanModel.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
    +         IEnumerable<IEnumerable<double>> testAR1ModelPredictions = AR1model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
    +
    +         PrognosisTestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
    +         PrognosisTestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticAR1 : double.PositiveInfinity;
    +         PrognosisTestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
    +         PrognosisTestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticMean : double.PositiveInfinity;
    +       }
          }
        }
  • stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionBase.cs

    r9462 → r11144

          protected void CalculateTimeSeriesResults() {
            OnlineCalculatorError errorState;
    -       double trainingMean = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).Average();
    +       double trainingMean = ProblemData.TrainingIndices.Any() ? ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).Average() : double.NaN;
            var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);

    [...]

            #region Calculate training quality measures
    -       IEnumerable<double> trainingTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToList();
    -       IEnumerable<double> trainingEstimatedValues = EstimatedTrainingValues.ToList();
    -       IEnumerable<double> trainingMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
    -       IEnumerable<double> trainingAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
    -
    -       TrainingDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
    -       TrainingDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingDirectionalSymmetry : 0.0;
    -       TrainingWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
    -       TrainingWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingWeightedDirectionalSymmetry : 0.0;
    -       TrainingTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingAR1ModelPredictions, trainingEstimatedValues, out errorState);
    -       TrainingTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticAR1 : double.PositiveInfinity;
    -       TrainingTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingMeanModelPredictions, trainingEstimatedValues, out errorState);
    -       TrainingTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticMean : double.PositiveInfinity;
    +       if (ProblemData.TrainingIndices.Any()) {
    +         IEnumerable<double> trainingTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToList();
    +         IEnumerable<double> trainingEstimatedValues = EstimatedTrainingValues.ToList();
    +         IEnumerable<double> trainingMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
    +         IEnumerable<double> trainingAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
    +
    +         TrainingDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
    +         TrainingDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingDirectionalSymmetry : 0.0;
    +         TrainingWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
    +         TrainingWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingWeightedDirectionalSymmetry : 0.0;
    +         TrainingTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingAR1ModelPredictions, trainingEstimatedValues, out errorState);
    +         TrainingTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticAR1 : double.PositiveInfinity;
    +         TrainingTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingMeanModelPredictions, trainingEstimatedValues, out errorState);
    +         TrainingTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticMean : double.PositiveInfinity;
    +       }
            #endregion

            #region Calculate test quality measures
    -       IEnumerable<double> testTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToList();
    -       IEnumerable<double> testEstimatedValues = EstimatedTestValues.ToList();
    -       IEnumerable<double> testMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
    -       IEnumerable<double> testAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
    -
    -       TestDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
    -       TestDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestDirectionalSymmetry : 0.0;
    -       TestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
    -       TestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestWeightedDirectionalSymmetry : 0.0;
    -       TestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
    -       TestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticAR1 : double.PositiveInfinity;
    -       TestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
    -       TestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticMean : double.PositiveInfinity;
    +       if (ProblemData.TestIndices.Any()) {
    +         IEnumerable<double> testTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToList();
    +         IEnumerable<double> testEstimatedValues = EstimatedTestValues.ToList();
    +         IEnumerable<double> testMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
    +         IEnumerable<double> testAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
    +
    +         TestDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
    +         TestDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestDirectionalSymmetry : 0.0;
    +         TestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
    +         TestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestWeightedDirectionalSymmetry : 0.0;
    +         TestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
    +         TestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticAR1 : double.PositiveInfinity;
    +         TestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
    +         TestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticMean : double.PositiveInfinity;
    +       }
            #endregion
          }
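
    The last two files apply the same defensive pattern: baseline statistics are only computed when the corresponding partition actually contains rows, because Enumerable.Average (and First) throw on empty sequences. A minimal, standalone illustration of that pattern, independent of the HeuristicLab types:

        using System;
        using System.Linq;

        public static class EmptyPartitionGuard {
          public static void Main() {
            int[] trainingIndices = { };             // an empty training partition
            double[] values = { 1.0, 2.0, 3.0 };

            // Without the Any() guard, Average() over an empty selection throws
            // InvalidOperationException; the guarded form falls back to NaN,
            // matching the fallback used in TimeSeriesPrognosisSolutionBase.
            double trainingMean = trainingIndices.Any()
              ? trainingIndices.Select(i => values[i]).Average()
              : double.NaN;

            Console.WriteLine(trainingMean);         // prints NaN
          }
        }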