Changeset 11144 for stable/HeuristicLab.Problems.DataAnalysis
- Timestamp: 07/08/14 19:03:36
- Location: stable
- Files: 11 edited
Legend:
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
stable
  - Property svn:mergeinfo changed: /trunk/sources merged: 10173-10176,10540-10541,10543,10545,11031
stable/HeuristicLab.Problems.DataAnalysis
  - Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.DataAnalysis merged: 10540,11031
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationProblemData.cs
r9456 → r11144

     public string TargetVariable {
       get { return TargetVariableParameter.Value.Value; }
+      set {
+        if (value == null) throw new ArgumentNullException("targetVariable", "The provided value for the targetVariable is null.");
+        if (value == TargetVariable) return;
+
+        var matchingParameterValue = TargetVariableParameter.ValidValues.FirstOrDefault(v => v.Value == value);
+        if (matchingParameterValue == null) throw new ArgumentException("The provided value is not valid as the targetVariable.", "targetVariable");
+        TargetVariableParameter.Value = matchingParameterValue;
+      }
     }
…
     }
     #endregion
+
+    protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+      IClassificationProblemData classificationProblemData = problemData as IClassificationProblemData;
+      if (classificationProblemData == null)
+        throw new ArgumentException("The problem data is no classification problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      var returnValue = base.IsProblemDataCompatible(classificationProblemData, out errorMessage);
+      //check targetVariable
+      if (classificationProblemData.InputVariables.All(var => var.Value != TargetVariable)) {
+        errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
+                       + Environment.NewLine + errorMessage;
+        return false;
+      }
+
+      var newClassValues = classificationProblemData.Dataset.GetDoubleValues(TargetVariable).Distinct().OrderBy(x => x);
+      if (!newClassValues.SequenceEqual(ClassValues)) {
+        errorMessage = errorMessage + string.Format("The class values differ in the provided classification problem data.");
+        return false;
+      }
+
+      return returnValue;
+    }
+
+    public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+      ClassificationProblemData classificationProblemData = problemData as ClassificationProblemData;
+      if (classificationProblemData == null)
+        throw new ArgumentException("The problem data is not a classification problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      base.AdjustProblemDataProperties(problemData);
+      TargetVariable = classificationProblemData.TargetVariable;
+      for (int i = 0; i < classificationProblemData.ClassNames.Count(); i++)
+        ClassNamesParameter.Value[i, 0] = classificationProblemData.ClassNames.ElementAt(i);
+
+      for (int i = 0; i < Classes; i++) {
+        for (int j = 0; j < Classes; j++) {
+          ClassificationPenaltiesParameter.Value[i, j] = classificationProblemData.GetClassificationPenalty(ClassValuesCache[i], ClassValuesCache[j]);
+        }
+      }
+    }
   }
 }
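For orientation, a minimal standalone sketch of the class-value criterion used in IsProblemDataCompatible above. The helper name SameClassValues is illustrative and not part of the changeset; the committed code compares the new values against the cached ClassValues of the existing problem data.

  using System.Collections.Generic;
  using System.Linq;

  static class ClassValueCheck {
    // Two targets are treated as compatible when their distinct, ordered class values match.
    public static bool SameClassValues(IEnumerable<double> a, IEnumerable<double> b) {
      return a.Distinct().OrderBy(x => x)
              .SequenceEqual(b.Distinct().OrderBy(x => x));
    }
  }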
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/DataAnalysisProblemData.cs
r9456 → r11144

 using System.Collections.Generic;
 using System.Linq;
+using System.Text;
 using HeuristicLab.Collections;
 using HeuristicLab.Common;
…
       if (listeners != null) listeners(this, EventArgs.Empty);
     }
+
+    protected virtual bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
+      errorMessage = string.Empty;
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+
+      //check allowed input variables
+      StringBuilder message = new StringBuilder();
+      var variables = new HashSet<string>(problemData.InputVariables.Select(x => x.Value));
+      foreach (var item in AllowedInputVariables) {
+        if (!variables.Contains(item))
+          message.AppendLine("Input variable '" + item + "' is not present in the new problem data.");
+      }
+
+      if (message.Length != 0) {
+        errorMessage = message.ToString();
+        return false;
+      }
+      return true;
+
+    }
+
+    public virtual void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
+      DataAnalysisProblemData data = problemData as DataAnalysisProblemData;
+      if (data == null) throw new ArgumentException("The problem data is not a data analysis problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      string errorMessage;
+      if (!data.IsProblemDataCompatible(this, out errorMessage)) {
+        throw new InvalidOperationException(errorMessage);
+      }
+
+      foreach (var inputVariable in InputVariables) {
+        var variable = data.InputVariables.FirstOrDefault(i => i.Value == inputVariable.Value);
+        InputVariables.SetItemCheckedState(inputVariable, variable != null && data.InputVariables.ItemChecked(variable));
+      }
+
+      TrainingPartition.Start = TrainingPartition.End = 0;
+      TestPartition.Start = 0;
+      TestPartition.End = Dataset.Rows;
+    }
   }
 }
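A hedged usage sketch of the new public method (the variable names existingProblemData and importedProblemData are illustrative, not part of the changeset): incompatibility surfaces to callers as the InvalidOperationException thrown by AdjustProblemDataProperties.

  try {
    // the protected IsProblemDataCompatible check runs first; on success the receiver
    // copies the checked state of its input variables from the argument and resets its partitions
    existingProblemData.AdjustProblemDataProperties(importedProblemData);
  } catch (InvalidOperationException e) {
    // e.Message lists, one per line, the input variables that keep the two
    // problem data objects from being combined
    Console.WriteLine(e.Message);
  }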
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionProblemData.cs
r9456 → r11144

     public string TargetVariable {
       get { return TargetVariableParameter.Value.Value; }
+      set {
+        if (value == null) throw new ArgumentNullException("targetVariable", "The provided value for the targetVariable is null.");
+        if (value == TargetVariable) return;
+
+        var matchingParameterValue = TargetVariableParameter.ValidValues.FirstOrDefault(v => v.Value == value);
+        if (matchingParameterValue == null) throw new ArgumentException("The provided value is not valid as the targetVariable.", "targetVariable");
+        TargetVariableParameter.Value = matchingParameterValue;
+      }
     }
…
       OnChanged();
     }
+
+    protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+      IRegressionProblemData regressionProblemData = problemData as IRegressionProblemData;
+      if (regressionProblemData == null)
+        throw new ArgumentException("The problem data is not a regression problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      var returnValue = base.IsProblemDataCompatible(problemData, out errorMessage);
+      //check targetVariable
+      if (problemData.InputVariables.All(var => var.Value != TargetVariable)) {
+        errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
+                       + Environment.NewLine + errorMessage;
+        return false;
+      }
+      return returnValue;
+    }
+
+    public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+      RegressionProblemData regressionProblemData = problemData as RegressionProblemData;
+      if (regressionProblemData == null)
+        throw new ArgumentException("The problem data is not a regression problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      base.AdjustProblemDataProperties(problemData);
+      TargetVariable = regressionProblemData.TargetVariable;
+    }
   }
 }
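A hedged sketch of the setter contract introduced above; problemData stands for any RegressionProblemData instance and the variable names and values are illustrative.

  problemData.TargetVariable = "y";          // accepted only if "y" is among TargetVariableParameter.ValidValues
  try {
    problemData.TargetVariable = "unknown";  // not a valid target variable
  } catch (ArgumentException) {
    // invalid names are rejected; a null value raises ArgumentNullException instead
  }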
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisProblemData.cs
r9572 → r11144

     }

+    protected override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
+      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");
+      ITimeSeriesPrognosisProblemData timeseriesProblemData = problemData as ITimeSeriesPrognosisProblemData;
+      if (timeseriesProblemData == null)
+        throw new ArgumentException("The problem data is not a time-series problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      var returnValue = base.IsProblemDataCompatible(problemData, out errorMessage);
+      //check targetVariable
+      if (problemData.InputVariables.All(var => var.Value != TargetVariable)) {
+        errorMessage = string.Format("The target variable {0} is not present in the new problem data.", TargetVariable)
+                       + Environment.NewLine + errorMessage;
+        return false;
+      }
+      return returnValue;
+    }
+
+    public override void AdjustProblemDataProperties(IDataAnalysisProblemData problemData) {
+      TimeSeriesPrognosisProblemData timeSeriesProblemData = problemData as TimeSeriesPrognosisProblemData;
+      if (timeSeriesProblemData == null)
+        throw new ArgumentException("The problem data is not a timeseries problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
+
+      var trainingDataStart = TrainingIndices.First();
+
+      base.AdjustProblemDataProperties(problemData);
+
+      TestPartition.Start = trainingDataStart;
+
+      TrainingHorizon = timeSeriesProblemData.TrainingHorizon;
+      TestHorizon = timeSeriesProblemData.TestHorizon;
+    }
+
   }
 }
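To make the partition handling above concrete, a hedged before/after sketch (the concrete numbers are illustrative, not taken from the changeset):

  // before adjusting:                        TrainingPartition = [100, 400), TestPartition = [400, 500)
  // base.AdjustProblemDataProperties resets  TrainingPartition = [0, 0),     TestPartition = [0, Dataset.Rows)
  // the override then restores the old training start as the new test start and copies the horizons:
  //                                          TrainingPartition = [0, 0),     TestPartition = [100, Dataset.Rows)
  //                                          TrainingHorizon/TestHorizon taken from the provided problem data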
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisResults.cs
r9456 → r11144

       OnlineCalculatorError errorState;
       var problemData = Solution.ProblemData;
+      if (!problemData.TrainingIndices.Any()) return;
       var model = Solution.Model;
       //mean model
…
       OnlineCalculatorError errorState;
       var problemData = Solution.ProblemData;
+      if (!problemData.TestIndices.Any()) return;
       var model = Solution.Model;
-      //mean model
-      double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
-      var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
-
-      //AR1 model
-      double alpha, beta;
-      IEnumerable<double> trainingStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
-      OnlineLinearScalingParameterCalculator.Calculate(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Where(x => x > 0)), trainingStartValues, out alpha, out beta, out errorState);
-      var AR1model = new TimeSeriesPrognosisAutoRegressiveModel(problemData.TargetVariable, new double[] { beta }, alpha);
-
       var testHorizions = problemData.TestIndices.Select(r => Math.Min(testHorizon, problemData.TestPartition.End - r)).ToList();
       IEnumerable<IEnumerable<double>> testTargetValues = problemData.TestIndices.Zip(testHorizions, Enumerable.Range).Select(r => problemData.Dataset.GetDoubleValues(problemData.TargetVariable, r)).ToList();
       IEnumerable<IEnumerable<double>> testEstimatedValues = model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
       IEnumerable<double> testStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TestIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
-      IEnumerable<IEnumerable<double>> testMeanModelPredictions = meanModel.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
-      IEnumerable<IEnumerable<double>> testAR1ModelPredictions = AR1model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();

       IEnumerable<double> originalTestValues = testTargetValues.SelectMany(x => x).ToList();
…
       PrognosisTestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testStartValues, testTargetValues, testEstimatedValues, out errorState);
       PrognosisTestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? PrognosisTestWeightedDirectionalSymmetry : 0.0;
-      PrognosisTestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
-      PrognosisTestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticAR1 : double.PositiveInfinity;
-      PrognosisTestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
-      PrognosisTestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticMean : double.PositiveInfinity;
+
+      if (problemData.TrainingIndices.Any()) {
+        //mean model
+        double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
+        var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
+
+        //AR1 model
+        double alpha, beta;
+        IEnumerable<double> trainingStartValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Select(r => r - 1).Where(r => r > 0)).ToList();
+        OnlineLinearScalingParameterCalculator.Calculate(problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices.Where(x => x > 0)), trainingStartValues, out alpha, out beta, out errorState);
+        var AR1model = new TimeSeriesPrognosisAutoRegressiveModel(problemData.TargetVariable, new double[] { beta }, alpha);
+
+        IEnumerable<IEnumerable<double>> testMeanModelPredictions = meanModel.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
+        IEnumerable<IEnumerable<double>> testAR1ModelPredictions = AR1model.GetPrognosedValues(problemData.Dataset, problemData.TestIndices, testHorizions).ToList();
+
+        PrognosisTestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
+        PrognosisTestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticAR1 : double.PositiveInfinity;
+        PrognosisTestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testStartValues, testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
+        PrognosisTestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? PrognosisTestTheilsUStatisticMean : double.PositiveInfinity;
+      }
     }
   }
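For readers unfamiliar with the AR1 baseline used for Theil's U above, a minimal standalone sketch of fitting y_t ≈ alpha + beta * y_(t-1) by ordinary least squares. This is illustrative only; the changeset itself obtains alpha and beta via OnlineLinearScalingParameterCalculator.

  using System.Collections.Generic;
  using System.Linq;

  static class Ar1Baseline {
    // Fit y_t ≈ alpha + beta * y_(t-1) on a training series by ordinary least squares.
    public static void Fit(IReadOnlyList<double> y, out double alpha, out double beta) {
      var x = y.Take(y.Count - 1).ToArray();   // lagged values y_(t-1)
      var t = y.Skip(1).ToArray();             // current values y_t
      double meanX = x.Average(), meanT = t.Average();
      double cov = x.Zip(t, (a, b) => (a - meanX) * (b - meanT)).Sum();
      double varX = x.Sum(a => (a - meanX) * (a - meanX));
      beta = cov / varX;                       // slope
      alpha = meanT - beta * meanX;            // intercept
    }
  }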
stable/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionBase.cs
r9462 → r11144

     protected void CalculateTimeSeriesResults() {
       OnlineCalculatorError errorState;
-      double trainingMean = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).Average();
+      double trainingMean = ProblemData.TrainingIndices.Any() ? ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).Average() : double.NaN;
       var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);

…

       #region Calculate training quality measures
-      IEnumerable<double> trainingTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToList();
-      IEnumerable<double> trainingEstimatedValues = EstimatedTrainingValues.ToList();
-      IEnumerable<double> trainingMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
-      IEnumerable<double> trainingAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
-
-      TrainingDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
-      TrainingDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingDirectionalSymmetry : 0.0;
-      TrainingWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
-      TrainingWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingWeightedDirectionalSymmetry : 0.0;
-      TrainingTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingAR1ModelPredictions, trainingEstimatedValues, out errorState);
-      TrainingTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticAR1 : double.PositiveInfinity;
-      TrainingTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingMeanModelPredictions, trainingEstimatedValues, out errorState);
-      TrainingTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticMean : double.PositiveInfinity;
+      if (ProblemData.TrainingIndices.Any()) {
+        IEnumerable<double> trainingTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).ToList();
+        IEnumerable<double> trainingEstimatedValues = EstimatedTrainingValues.ToList();
+        IEnumerable<double> trainingMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
+        IEnumerable<double> trainingAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TrainingIndices).ToList();
+
+        TrainingDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
+        TrainingDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingDirectionalSymmetry : 0.0;
+        TrainingWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingEstimatedValues, out errorState);
+        TrainingWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TrainingWeightedDirectionalSymmetry : 0.0;
+        TrainingTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingAR1ModelPredictions, trainingEstimatedValues, out errorState);
+        TrainingTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticAR1 : double.PositiveInfinity;
+        TrainingTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(trainingTargetValues.First(), trainingTargetValues, trainingMeanModelPredictions, trainingEstimatedValues, out errorState);
+        TrainingTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TrainingTheilsUStatisticMean : double.PositiveInfinity;
+      }
       #endregion

       #region Calculate test quality measures
-      IEnumerable<double> testTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToList();
-      IEnumerable<double> testEstimatedValues = EstimatedTestValues.ToList();
-      IEnumerable<double> testMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
-      IEnumerable<double> testAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
-
-      TestDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
-      TestDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestDirectionalSymmetry : 0.0;
-      TestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
-      TestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestWeightedDirectionalSymmetry : 0.0;
-      TestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
-      TestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticAR1 : double.PositiveInfinity;
-      TestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
-      TestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticMean : double.PositiveInfinity;
+      if (ProblemData.TestIndices.Any()) {
+        IEnumerable<double> testTargetValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices).ToList();
+        IEnumerable<double> testEstimatedValues = EstimatedTestValues.ToList();
+        IEnumerable<double> testMeanModelPredictions = meanModel.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
+        IEnumerable<double> testAR1ModelPredictions = AR1model.GetEstimatedValues(ProblemData.Dataset, ProblemData.TestIndices).ToList();
+
+        TestDirectionalSymmetry = OnlineDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
+        TestDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestDirectionalSymmetry : 0.0;
+        TestWeightedDirectionalSymmetry = OnlineWeightedDirectionalSymmetryCalculator.Calculate(testTargetValues.First(), testTargetValues, testEstimatedValues, out errorState);
+        TestWeightedDirectionalSymmetry = errorState == OnlineCalculatorError.None ? TestWeightedDirectionalSymmetry : 0.0;
+        TestTheilsUStatisticAR1 = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testAR1ModelPredictions, testEstimatedValues, out errorState);
+        TestTheilsUStatisticAR1 = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticAR1 : double.PositiveInfinity;
+        TestTheilsUStatisticMean = OnlineTheilsUStatisticCalculator.Calculate(testTargetValues.First(), testTargetValues, testMeanModelPredictions, testEstimatedValues, out errorState);
+        TestTheilsUStatisticMean = errorState == OnlineCalculatorError.None ? TestTheilsUStatisticMean : double.PositiveInfinity;
+      }
       #endregion
     }
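A minimal standalone sketch of why these guards are needed (the values variable is illustrative, not HeuristicLab code): Enumerable.Average throws InvalidOperationException on an empty sequence, which is exactly what happened with an empty training or test partition.

  using System;
  using System.Linq;

  class EmptyPartitionDemo {
    static void Main() {
      var values = Enumerable.Empty<double>();                      // e.g. an empty training partition
      double mean = values.Any() ? values.Average() : double.NaN;   // mirrors the guard above
      Console.WriteLine(mean);                                      // prints NaN instead of throwing
    }
  }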
stable/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/Classification/IClassificationProblemData.cs
r9456 → r11144

 namespace HeuristicLab.Problems.DataAnalysis {
   public interface IClassificationProblemData : IDataAnalysisProblemData {
-    string TargetVariable { get; }
+    string TargetVariable { get; set; }

     IEnumerable<string> ClassNames { get; }
stable/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/IDataAnalysisProblemData.cs
r9456 → r11144

     event EventHandler Changed;
+
+    void AdjustProblemDataProperties(IDataAnalysisProblemData problemData);
   }
 }
stable/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/Regression/IRegressionProblemData.cs
r9456 → r11144

 namespace HeuristicLab.Problems.DataAnalysis {
   public interface IRegressionProblemData : IDataAnalysisProblemData {
-    string TargetVariable { get; }
+    string TargetVariable { get; set; }
   }
 }