Changeset 7989
- Timestamp: 06/12/12 10:31:56
- Location: branches/HeuristicLab.TimeSeries
- Files: 29 edited, 2 copied
branches/HeuristicLab.TimeSeries/HeuristicLab.Encodings.SymbolicExpressionTreeEncoding/3.4/Compiler/SymbolicExpressionTreeCompiler.cs
r7268 → r7989
- SymbolicExpressionTreeCompiler is now a static class; the instance fields entryPoint and postInstructionCompiledHooks were removed.
- Compile(ISymbolicExpressionTree tree, Func<ISymbolicExpressionTreeNode, byte> opCodeMapper) is now static and delegates to a new static overload Compile(tree, opCodeMapper, IEnumerable<Func<Instruction, Instruction>> postInstructionCompiledHooks); the entry-point dictionary is created locally per call instead of being cleared instance state.
- The private branch-level Compile helper is now static and threads the hook collection through its recursive calls.
- AddInstructionPostProcessingHook(Func<Instruction, Instruction> hook) and its XML documentation were removed; hooks are now passed directly to Compile.
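For callers, the practical difference is that hooks now travel with the call instead of with compiler state. A minimal sketch, assuming an existing tree and opCodeMapper are in scope (the hook body is a placeholder, not part of the changeset):

    // Hypothetical caller: pass post-instruction-compiled hooks to the static Compile overload.
    Func<Instruction, Instruction> loggingHook = instr => {
      // inspect or rewrite the compiled instruction before it is emitted
      return instr;
    };

    Instruction[] code = SymbolicExpressionTreeCompiler.Compile(
      tree,                       // ISymbolicExpressionTree to compile
      opCodeMapper,               // Func<ISymbolicExpressionTreeNode, byte>
      new[] { loggingHook });     // hooks applied to every compiled instruction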
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis.Views/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis.Views-3.4.csproj
r7890 → r7989
- Added assembly references to HeuristicLab.Parameters-3.3 and HeuristicLab.Persistence-3.3 (Version=3.3.0.0, PublicKeyToken=ba48961d6f65dcec).
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis.Views/3.4/InteractiveSymbolicTimeSeriesPrognosisSolutionSimplifierView.cs
r7463 → r7989
- UpdateModel: the SymbolicTimeSeriesPrognosisModel is now constructed from the tree and Content.Model.Interpreter only; the Content.ProblemData.TargetVariables.ToArray() argument was dropped (the problem is now single-target).
- CalculateImpactValues: rewritten for a single target variable. The per-target loop over ProblemData.TargetVariables (with originalOutput.Skip(i).TakeEvery(nTargetVariables)) was removed; the method now reads ProblemData.TargetVariable, evaluates the tree once via interpreter.GetSymbolicExpressionTreeValues(tree, dataset, rows), and for each node of the first branch computes the impact as originalR2 - newR2 after temporarily replacing the node with its replacement constant (impact = 0 for no change, < 0 if the simplified tree is better, > 0 if it is worse).
- CalculateReplacementValue: prognosed values are obtained with GetSymbolicExpressionTreeValues(tempTree, Content.ProblemData.Dataset, rows) instead of the multi-target, horizon-1 overload; the median of the prognosed values is returned as before.
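The impact measure itself is unchanged by this revision. The following standalone sketch only illustrates the idea with hypothetical delegates standing in for the tree manipulation and interpreter calls done in the view:

    using System;

    static class ImpactSketch {
      // Impact of a node = loss in R² when the node is replaced by a constant.
      public static double NodeImpact(Func<double> evaluateR2,
                                      Action replaceNodeWithConstant,
                                      Action restoreNode) {
        double originalR2 = evaluateR2();   // quality of the unmodified tree
        replaceNodeWithConstant();          // swap the node for its replacement value
        double newR2 = evaluateR2();        // quality of the simplified tree
        restoreNode();                      // undo the replacement
        // 0: no change, < 0: the simplified tree is better, > 0: it is worse
        return originalR2 - newR2;
      }
    }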
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis-3.4.csproj
r7886 → r7989
- Added Compile includes for Interfaces\ISymbolicTimeSeriesPrognogisExpressionTreeInterpreter.cs and SymbolicTimeSeriesPrognosisExpressionTreeInterpreter.cs.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/Interfaces/ISymbolicTimeSeriesPrognogisExpressionTreeInterpreter.cs
r7929 → r7989
- ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter now derives from ISymbolicDataAnalysisExpressionTreeInterpreter instead of INamedItem (the using HeuristicLab.Core directive was removed accordingly).
- The two multi-target overloads GetSymbolicExpressionTreeValues(tree, dataset, string[] targetVariables, rows[, horizon]) were replaced by a string TargetVariable { get; set; } property and a single method IEnumerable<IEnumerable<double>> GetSymbolicExpressionTreeValues(ISymbolicExpressionTree tree, Dataset dataset, IEnumerable<int> rows, int horizon).
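A consumer of the new signature receives one inner sequence per evaluated row, each holding the horizon-step forecast for that row. A hedged usage sketch, assuming an interpreter instance, tree, dataset and row enumeration are already in scope:

    // Iterate n-step-ahead forecasts produced by the new interface method.
    int horizon = 5;
    foreach (IEnumerable<double> forecast in
             interpreter.GetSymbolicExpressionTreeValues(tree, dataset, rows, horizon)) {
      // 'forecast' holds the predictions for steps t+1 .. t+horizon of one row
      Console.WriteLine(string.Join(" ", forecast));
    }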
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveEvaluator.cs
r7120 → r7989
- The abstract evaluator is now marked [StorableClass] and gained a hidden ApplyLinearScaling fixed-value parameter ("Flag that indicates if the individual should be linearly scaled before evaluating.", default true) next to the existing Horizon parameter; using System and using System.Collections.Generic were added.
- Added a protected static CalculateWithScaling(targetValues, estimatedValues, calculator, maxRows) helper backed by a [ThreadStatic] double[] cache. It first streams the estimated values into an OnlineLinearScalingParameterCalculator while caching them (the calculator's static methods are not used because they check that both enumerations have the same number of elements, which does not hold when the cache is reused), then replays the cached values as estimated * beta + alpha into the passed online calculator.
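The two-pass structure can be shown without the HeuristicLab calculators. This sketch uses plain arrays and ordinary least squares for alpha and beta; it mirrors the idea, not the exact implementation:

    using System.Linq;

    static class ScalingSketch {
      // First fit target ≈ alpha + beta * estimated, then score the scaled estimates.
      public static double ScaledMse(double[] target, double[] estimated) {
        double meanT = target.Average();
        double meanE = estimated.Average();
        double cov = 0.0, varE = 0.0;
        for (int i = 0; i < target.Length; i++) {
          cov  += (estimated[i] - meanE) * (target[i] - meanT);
          varE += (estimated[i] - meanE) * (estimated[i] - meanE);
        }
        double beta = varE > 0 ? cov / varE : 1.0;   // slope
        double alpha = meanT - beta * meanE;         // intercept
        // second pass: mean squared error of the linearly scaled estimates
        return target.Zip(estimated,
                          (t, e) => (t - (e * beta + alpha)) * (t - (e * beta + alpha)))
                     .Average();
      }
    }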
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveMeanSquaredErrorEvaluator.cs
r7183 → r7989
- Removed the unused using System.Drawing.Printing directive.
- Evaluate (the operator variant) now casts SymbolicDataAnalysisTreeInterpreterParameter.ActualValue to ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter and passes ApplyLinearScaling through to Calculate.
- Calculate was rewritten for a single target variable: it reads the target values for rows.SelectMany(r => Enumerable.Range(r, horizon)), flattens the interpreter's per-row forecasts with SelectMany, bounds them with LimitToRange(lowerEstimationLimit, upperEstimationLimit), and computes the mean squared error either through CalculateWithScaling (when applyLinearScaling is set, using problemData.Dataset.Rows as the cache size) or OnlineMeanSquaredErrorCalculator.Calculate; on a calculator error it now returns Double.NaN instead of double.MaxValue.
- The per-component machinery was deleted: the array of OnlineMeanSquaredErrorCalculator instances, DetermineScalingFactors, Scale, MakeSum, MakeProduct and MakeConstant.
- Evaluate (the IExecutionContext variant) forwards the same additional arguments (interpreter cast and ApplyLinearScaling).
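The pairing of targets and forecasts relies on both sequences being flattened in the same row-major, step-minor order. A small alignment sketch with hypothetical data (targetColumn and forecastsPerRow are placeholders, standard LINQ only):

    // For each evaluated row r the targets are dataset rows r, r+1, ..., r+horizon-1,
    // and the interpreter yields one inner sequence of 'horizon' predictions per row.
    int horizon = 3;
    int[] rows = { 10, 20 };
    IEnumerable<double> targets = rows.SelectMany(r => Enumerable.Range(r, horizon))
                                      .Select(i => targetColumn[i]);     // hypothetical target array
    IEnumerable<double> estimates = forecastsPerRow.SelectMany(f => f);  // flatten per-row forecasts
    // 'targets' and 'estimates' now line up element-for-element for the MSE calculation.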
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveProblem.cs
r7843 → r7989
- The constructor now creates a SymbolicTimeSeriesPrognosisExpressionTreeInterpreter, sets its TargetVariable to ProblemData.TargetVariable and assigns it to SymbolicExpressionTreeInterpreter.
- OnProblemDataChanged additionally propagates the new ProblemData.TargetVariable to the interpreter (when it implements ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter) before updating the estimation limits.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveTrainingBestSolutionAnalyzer.cs
r7183 → r7989
- Removed the unused using System.Linq directive.
- CreateSolution constructs the SymbolicTimeSeriesPrognosisModel without the TargetVariables array and casts the interpreter parameter to ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter; the estimation limits are passed as before.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveValidationBestSolutionAnalyzer.cs
r7183 → r7989
- Same change as in the training-best analyzer: the using System.Linq directive was removed and CreateSolution passes the interpreter (cast to ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter) and the estimation limits, but no longer a TargetVariables array.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SymbolicTimeSeriesPrognosisExpressionTreeInterpreter.cs
r7949 → r7989
- The class was renamed from SymbolicTimeSeriesPrognosisInterpreter to SymbolicTimeSeriesPrognosisExpressionTreeInterpreter and now derives from SymbolicDataAnalysisExpressionTreeInterpreter, implementing ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter.
- The self-contained implementation was removed: the nested InterpreterState and OpCodes classes, the symbolToOpcode dictionary, MapSymbolToOpCode, SkipInstructions, the CheckExpressionsWithIntervalArithmetic parameter declaration, the stored string[] targetVariables field and the large recursive Evaluate method that resolved prognosed values through a cachedPrognosedValues dictionary. Shared functionality now comes from the base interpreter and the now-public InterpreterState, OpCodes and static SymbolicExpressionTreeCompiler classes.
- A TargetVariable property backed by a hidden FixedValueParameter<StringValue> replaces the target-variable array; the constructor taking string[] targetVariables is gone.
- GetSymbolicExpressionTreeValues(tree, dataset, rows, horizon), plus an overload taking per-row horizons, now returns IEnumerable<IEnumerable<double>>: one forecast sequence per row instead of one sequence per row per target variable. It still throws NotSupportedException when interval-arithmetic checking is enabled, increments the EvaluatedSolutions counter per call, and throws ArgumentException if the rows and horizons enumerations differ in length.
- Forecasting uses a [ThreadStatic] targetVariableCache holding the target column plus an invalidateCacheIndexes list: PrepareInterpreterState compiles the tree with SymbolicExpressionTreeCompiler.Compile(tree, OpCodes.MapSymbolToOpCode) and wires Variable instructions that reference the target variable to the cache; for each row the interpreter evaluates horizon steps, writes each prediction back into the cache so that later steps see earlier predictions, yields the per-row forecast, and then restores the original dataset values for the touched rows.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SymbolicTimeSeriesPrognosisModel.cs
r7183 → r7989
- Removed the using System directive and the stored string[] targetVariables field (including its cloning logic); the constructor no longer takes an IEnumerable<string> targetVariables argument.
- GetPrognosedValues now returns IEnumerable<IEnumerable<double>> (one forecast sequence per row) and simply clamps each per-row prediction sequence with LimitToRange(lowerEstimationLimit, upperEstimationLimit) instead of assembling per-target component arrays.
- Scale was rewritten for the single ProblemData.TargetVariable: it computes alpha and beta once via OnlineLinearScalingParameterCalculator.Calculate over the model's training estimates and the target values (returning early on a calculator error), then either updates the constants of an existing "mainBranch * beta + alpha" structure when the tree was scaled before, or wraps the first branch of the start node in MakeSum(MakeProduct(mainBranch, beta), alpha). The per-target-variable loop from the previous revision was removed.
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Evaluators/SymbolicDataAnalysisEvaluator.cs
r7615 r7989 96 96 [StorableHook(HookType.AfterDeserialization)] 97 97 private void AfterDeserialization() { 98 if (!Parameters.ContainsKey(ValidRowIndicatorParameterName))98 if (!Parameters.ContainsKey(ValidRowIndicatorParameterName)) 99 99 Parameters.Add(new ValueLookupParameter<StringValue>(ValidRowIndicatorParameterName, "An indicator variable in the data set that specifies which rows should be evaluated (those for which the indicator <> 0) (optional).")); 100 100 } … … 104 104 } 105 105 106 protected IEnumerable<int> GenerateRowsToEvaluate(double percentageOfRows) 107 { 106 protected IEnumerable<int> GenerateRowsToEvaluate(double percentageOfRows) { 108 107 109 108 IEnumerable<int> rows; … … 112 111 int testPartitionStart = ProblemDataParameter.ActualValue.TestPartition.Start; 113 112 int testPartitionEnd = ProblemDataParameter.ActualValue.TestPartition.End; 114 115 113 if (samplesEnd < samplesStart) throw new ArgumentException("Start value is larger than end value."); 116 114 … … 125 123 126 124 rows = rows.Where(i => i < testPartitionStart || testPartitionEnd <= i); 127 128 if(ValidRowIndicatorParameter.ActualValue != null) 129 { 125 if (ValidRowIndicatorParameter.ActualValue != null) { 130 126 string indicatorVar = ValidRowIndicatorParameter.ActualValue.Value; 131 127 var problemData = ProblemDataParameter.ActualValue; 132 128 var indicatorRow = problemData.Dataset.GetReadOnlyDoubleValues(indicatorVar); 133 rows = rows.Where(r =>!indicatorRow[r].IsAlmost(0.0));129 rows = rows.Where(r => !indicatorRow[r].IsAlmost(0.0)); 134 130 } 135 131 return rows; -
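Note: GenerateRowsToEvaluate now supports an optional valid-row indicator; after sampling and excluding the test partition, rows are kept only when the indicator column is not (almost) zero. A fragment-level sketch of that filter, assuming a plain IReadOnlyList<double> stands in for the dataset column and an explicit epsilon replaces the IsAlmost extension used in the diff:

// Sketch: keep only rows whose indicator value is non-zero (within a tolerance),
// mirroring rows.Where(r => !indicatorRow[r].IsAlmost(0.0)) from the changeset.
IEnumerable<int> FilterByIndicator(IEnumerable<int> rows, IReadOnlyList<double> indicatorRow, double eps = 1e-12) {
  return rows.Where(r => Math.Abs(indicatorRow[r]) > eps);
}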
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.csproj
r7930 r7989
150 150 <Compile Include="Creators\SymbolicDataAnalysisExpressionRampedHalfAndHalfTreeCreator.cs" />
151 151 <Compile Include="Creators\SymbolicDataAnalysisExpressionTreeCreator.cs" />
152 <Compile Include="Interfaces\ISymbolicTimeSeriesPrognogisExpressionTreeInterpreter.cs" />
153 152 <Compile Include="Crossovers\MultiSymbolicDataAnalysisExpressionCrossover.cs" />
154 153 <Compile Include="Crossovers\SymbolicDataAnalysisExpressionContextAwareCrossover.cs" />
…
178 177 <Compile Include="Interfaces\ISymbolicDataAnalysisAnalyzer.cs" />
179 178 <Compile Include="SymbolicDataAnalysisSingleObjectiveProblem.cs" />
180 <Compile Include="Interpreter\old_SymbolicDataAnalysisExpressionTreeInterpreter.cs" />
181 179 <Compile Include="SymbolicDataAnalysisExpressionTreeSimplifier.cs" />
182 180 <Compile Include="SymbolicDataAnalysisProblem.cs" />
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interfaces/ISymbolicDataAnalysisExpressionTreeInterpreter.cs
r7615 r7989
26 26
27 27 namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
28 public interface ISymbolicDataAnalysisExpressionTreeInterpreter : ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter, IStatefulItem {
28 public interface ISymbolicDataAnalysisExpressionTreeInterpreter : INamedItem, IStatefulItem {
29 29 IEnumerable<double> GetSymbolicExpressionTreeValues(ISymbolicExpressionTree tree, Dataset dataset, IEnumerable<int> rows);
30 30 IntValue EvaluatedSolutions { get; set; }
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/InterpreterState.cs
r7930 r7989 23 23 24 24 namespace HeuristicLab.Problems.DataAnalysis.Symbolic { 25 internalclass InterpreterState {25 public class InterpreterState { 26 26 private double[] argumentStack; 27 27 private int argumentStackPointer; … … 32 32 set { pc = value; } 33 33 } 34 internalInterpreterState(Instruction[] code, int argumentStackSize) {34 public InterpreterState(Instruction[] code, int argumentStackSize) { 35 35 this.code = code; 36 36 this.pc = 0; … … 41 41 } 42 42 43 internalvoid Reset() {43 public void Reset() { 44 44 this.pc = 0; 45 45 this.argumentStackPointer = 0; 46 46 } 47 47 48 internalInstruction NextInstruction() {48 public Instruction NextInstruction() { 49 49 return code[pc++]; 50 50 } 51 51 // skips a whole branch 52 internalvoid SkipInstructions() {52 public void SkipInstructions() { 53 53 int i = 1; 54 54 while (i > 0) { … … 65 65 } 66 66 67 internalvoid CreateStackFrame(double[] argValues) {67 public void CreateStackFrame(double[] argValues) { 68 68 // push in reverse order to make indexing easier 69 69 for (int i = argValues.Length - 1; i >= 0; i--) { … … 73 73 } 74 74 75 internalvoid RemoveStackFrame() {75 public void RemoveStackFrame() { 76 76 int size = (int)Pop(); 77 77 argumentStackPointer -= size; 78 78 } 79 79 80 internaldouble GetStackFrameValue(ushort index) {80 public double GetStackFrameValue(ushort index) { 81 81 // layout of stack: 82 82 // [0] <- argumentStackPointer -
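Note: InterpreterState and its members are made public here so that derived interpreters can reuse them. The argument stack stores ADF argument values plus the frame size, which lets RemoveStackFrame discard a whole frame in one step. A rough fragment-level sketch of how a function call uses this API (EvaluateBranch stands in for the interpreter's recursive Evaluate and is not part of the changeset):

// Sketch: calling an automatically defined function through the InterpreterState stack frame API.
double CallFunction(InterpreterState state, double[] argValues, ushort entryPoint, Func<InterpreterState, double> EvaluateBranch) {
  int returnPc = state.ProgramCounter;    // remember where to continue after the call
  state.CreateStackFrame(argValues);      // push the argument values plus the frame size
  state.ProgramCounter = entryPoint;      // jump to the compiled function body
  double result = EvaluateBranch(state);  // OpCodes.Arg instructions read arguments via GetStackFrameValue(index)
  state.RemoveStackFrame();               // pop the whole frame again
  state.ProgramCounter = returnPc;        // return to the caller
  return result;
}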
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/OpCodes.cs
r7930 r7989
25 25
26 26 namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
27 internal static class OpCodes {
27 public static class OpCodes {
28 28 public const byte Add = 1;
29 29 public const byte Sub = 2;
…
132 132 public static byte MapSymbolToOpCode(ISymbolicExpressionTreeNode treeNode) {
133 133 byte opCode;
134 if (symbolToOpcode.TryGetValue(treeNode.Symbol.GetType(), out opCode))
135 return opCode;
136 else
137 throw new NotSupportedException("Symbol: " + treeNode.Symbol);
134 if (symbolToOpcode.TryGetValue(treeNode.Symbol.GetType(), out opCode)) return opCode;
135 else throw new NotSupportedException("Symbol: " + treeNode.Symbol);
138 136
139 137 }
140 138 }
-
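Note: OpCodes and MapSymbolToOpCode become public in this revision, so the static SymbolicExpressionTreeCompiler can be driven with this mapper from other assemblies. A fragment-level sketch of the call, matching the usage in PrepareInterpreterState below:

// Sketch: compile a symbolic expression tree into a linear instruction array
// using the now-public symbol-to-opcode mapping.
Instruction[] CompileTree(ISymbolicExpressionTree tree) {
  return SymbolicExpressionTreeCompiler.Compile(tree, OpCodes.MapSymbolToOpCode);
}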
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/SymbolicDataAnalysisExpressionTreeInterpreter.cs
r7930 r7989 32 32 [StorableClass] 33 33 [Item("SymbolicDataAnalysisExpressionTreeInterpreter", "Interpreter for symbolic expression trees including automatically defined functions.")] 34 public sealed class SymbolicDataAnalysisExpressionTreeInterpreter : ParameterizedNamedItem, 35 ISymbolicDataAnalysisExpressionTreeInterpreter, ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter { 34 public class SymbolicDataAnalysisExpressionTreeInterpreter : ParameterizedNamedItem, ISymbolicDataAnalysisExpressionTreeInterpreter { 36 35 private const string CheckExpressionsWithIntervalArithmeticParameterName = "CheckExpressionsWithIntervalArithmetic"; 37 36 private const string EvaluatedSolutionsParameterName = "EvaluatedSolutions"; … … 67 66 68 67 [StorableConstructor] 69 pr ivateSymbolicDataAnalysisExpressionTreeInterpreter(bool deserializing) : base(deserializing) { }70 pr ivateSymbolicDataAnalysisExpressionTreeInterpreter(SymbolicDataAnalysisExpressionTreeInterpreter original, Cloner cloner) : base(original, cloner) { }68 protected SymbolicDataAnalysisExpressionTreeInterpreter(bool deserializing) : base(deserializing) { } 69 protected SymbolicDataAnalysisExpressionTreeInterpreter(SymbolicDataAnalysisExpressionTreeInterpreter original, Cloner cloner) : base(original, cloner) { } 71 70 public override IDeepCloneable Clone(Cloner cloner) { 72 71 return new SymbolicDataAnalysisExpressionTreeInterpreter(this, cloner); … … 79 78 } 80 79 80 protected SymbolicDataAnalysisExpressionTreeInterpreter(string name, string description) 81 : base(name, description) { 82 Parameters.Add(new ValueParameter<BoolValue>(CheckExpressionsWithIntervalArithmeticParameterName, "Switch that determines if the interpreter checks the validity of expressions with interval arithmetic before evaluating the expression.", new BoolValue(false))); 83 Parameters.Add(new ValueParameter<IntValue>(EvaluatedSolutionsParameterName, "A counter for the total number of solutions the interpreter has evaluated", new IntValue(0))); 84 } 85 81 86 [StorableHook(HookType.AfterDeserialization)] 82 87 private void AfterDeserialization() { … … 95 100 96 101 public IEnumerable<double> GetSymbolicExpressionTreeValues(ISymbolicExpressionTree tree, Dataset dataset, IEnumerable<int> rows) { 97 return GetSymbolicExpressionTreeValues(tree, dataset, new string[] { "#NOTHING#" }, rows);98 }99 100 public IEnumerable<double> GetSymbolicExpressionTreeValues(ISymbolicExpressionTree tree, Dataset dataset, string[] targetVariables, IEnumerable<int> rows) {101 return GetSymbolicExpressionTreeValues(tree, dataset, targetVariables, rows, 1);102 }103 104 // for each row for each horizon for each target variable one value105 public IEnumerable<double> GetSymbolicExpressionTreeValues(ISymbolicExpressionTree tree, Dataset dataset, string[] targetVariables, IEnumerable<int> rows, int horizon) {106 102 if (CheckExpressionsWithIntervalArithmetic.Value) 107 103 throw new NotSupportedException("Interval arithmetic is not yet supported in the symbolic data analysis interpreter."); 108 104 109 105 EvaluatedSolutions.Value++; // increment the evaluated solutions counter 110 var state = PrepareInterpreterState(tree, dataset, targetVariables[0]); 111 112 // produce a n-step forecast for each target variable for all rows 113 var cachedPrognosedValues = new Dictionary<string, double[]>(); 114 //foreach (var targetVariable in targetVariables) 115 // cachedPrognosedValues[targetVariable] = new double[horizon]; 106 var state = PrepareInterpreterState(tree, dataset); 107 116 108 foreach (var 
rowEnum in rows) { 117 109 int row = rowEnum; 118 for (int localRow = row; localRow < row + horizon; localRow++) { 119 //int localRow = horizonRow; // create a local variable for the ref parameter 120 yield return Evaluate(dataset, ref localRow, row - 1, state, cachedPrognosedValues); 121 //cachedPrognosedValues[targetVariables[c]][horizonRow - row] = prog; 122 state.Reset(); 123 } 110 yield return Evaluate(dataset, ref row, state); 111 state.Reset(); 124 112 } 125 113 } 126 114 127 private InterpreterState PrepareInterpreterState(ISymbolicExpressionTree tree, Dataset dataset , string targetVariable) {115 private InterpreterState PrepareInterpreterState(ISymbolicExpressionTree tree, Dataset dataset) { 128 116 Instruction[] code = SymbolicExpressionTreeCompiler.Compile(tree, OpCodes.MapSymbolToOpCode); 129 117 int necessaryArgStackSize = 0; 130 for (int i = 0; i < code.Length; i++) { 131 Instruction instr = code[i]; 118 foreach (Instruction instr in code) { 132 119 if (instr.opCode == OpCodes.Variable) { 133 120 var variableTreeNode = (VariableTreeNode)instr.dynamicNode; 134 121 instr.iArg0 = dataset.GetReadOnlyDoubleValues(variableTreeNode.VariableName); 135 code[i] = instr;136 122 } else if (instr.opCode == OpCodes.LagVariable) { 137 123 var laggedVariableTreeNode = (LaggedVariableTreeNode)instr.dynamicNode; 138 124 instr.iArg0 = dataset.GetReadOnlyDoubleValues(laggedVariableTreeNode.VariableName); 139 code[i] = instr;140 125 } else if (instr.opCode == OpCodes.VariableCondition) { 141 126 var variableConditionTreeNode = (VariableConditionTreeNode)instr.dynamicNode; … … 148 133 } 149 134 150 pr ivate double Evaluate(Dataset dataset, ref int row, int lastObservedRow, InterpreterState state, Dictionary<string, double[]> cachedPrognosedValues) {135 protected virtual double Evaluate(Dataset dataset, ref int row, InterpreterState state) { 151 136 Instruction currentInstr = state.NextInstruction(); 152 137 switch (currentInstr.opCode) { 153 138 case OpCodes.Add: { 154 double s = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);155 for (int i = 1; i < currentInstr.nArguments; i++) { 156 s += Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);139 double s = Evaluate(dataset, ref row, state); 140 for (int i = 1; i < currentInstr.nArguments; i++) { 141 s += Evaluate(dataset, ref row, state); 157 142 } 158 143 return s; 159 144 } 160 145 case OpCodes.Sub: { 161 double s = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);162 for (int i = 1; i < currentInstr.nArguments; i++) { 163 s -= Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);146 double s = Evaluate(dataset, ref row, state); 147 for (int i = 1; i < currentInstr.nArguments; i++) { 148 s -= Evaluate(dataset, ref row, state); 164 149 } 165 150 if (currentInstr.nArguments == 1) s = -s; … … 167 152 } 168 153 case OpCodes.Mul: { 169 double p = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);170 for (int i = 1; i < currentInstr.nArguments; i++) { 171 p *= Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);154 double p = Evaluate(dataset, ref row, state); 155 for (int i = 1; i < currentInstr.nArguments; i++) { 156 p *= Evaluate(dataset, ref row, state); 172 157 } 173 158 return p; 174 159 } 175 160 case OpCodes.Div: { 176 double p = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);177 for (int i = 1; i < currentInstr.nArguments; i++) { 178 p /= Evaluate(dataset, ref row, 
lastObservedRow, state, cachedPrognosedValues);161 double p = Evaluate(dataset, ref row, state); 162 for (int i = 1; i < currentInstr.nArguments; i++) { 163 p /= Evaluate(dataset, ref row, state); 179 164 } 180 165 if (currentInstr.nArguments == 1) p = 1.0 / p; … … 182 167 } 183 168 case OpCodes.Average: { 184 double sum = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);185 for (int i = 1; i < currentInstr.nArguments; i++) { 186 sum += Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);169 double sum = Evaluate(dataset, ref row, state); 170 for (int i = 1; i < currentInstr.nArguments; i++) { 171 sum += Evaluate(dataset, ref row, state); 187 172 } 188 173 return sum / currentInstr.nArguments; 189 174 } 190 175 case OpCodes.Cos: { 191 return Math.Cos(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));176 return Math.Cos(Evaluate(dataset, ref row, state)); 192 177 } 193 178 case OpCodes.Sin: { 194 return Math.Sin(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));179 return Math.Sin(Evaluate(dataset, ref row, state)); 195 180 } 196 181 case OpCodes.Tan: { 197 return Math.Tan(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));182 return Math.Tan(Evaluate(dataset, ref row, state)); 198 183 } 199 184 case OpCodes.Square: { 200 return Math.Pow(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues), 2);185 return Math.Pow(Evaluate(dataset, ref row, state), 2); 201 186 } 202 187 case OpCodes.Power: { 203 double x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);204 double y = Math.Round(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));188 double x = Evaluate(dataset, ref row, state); 189 double y = Math.Round(Evaluate(dataset, ref row, state)); 205 190 return Math.Pow(x, y); 206 191 } 207 192 case OpCodes.SquareRoot: { 208 return Math.Sqrt(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));193 return Math.Sqrt(Evaluate(dataset, ref row, state)); 209 194 } 210 195 case OpCodes.Root: { 211 double x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);212 double y = Math.Round(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));196 double x = Evaluate(dataset, ref row, state); 197 double y = Math.Round(Evaluate(dataset, ref row, state)); 213 198 return Math.Pow(x, 1 / y); 214 199 } 215 200 case OpCodes.Exp: { 216 return Math.Exp(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));201 return Math.Exp(Evaluate(dataset, ref row, state)); 217 202 } 218 203 case OpCodes.Log: { 219 return Math.Log(Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues));204 return Math.Log(Evaluate(dataset, ref row, state)); 220 205 } 221 206 case OpCodes.Gamma: { 222 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);207 var x = Evaluate(dataset, ref row, state); 223 208 if (double.IsNaN(x)) return double.NaN; 224 209 else return alglib.gammafunction(x); 225 210 } 226 211 case OpCodes.Psi: { 227 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);212 var x = Evaluate(dataset, ref row, state); 228 213 if (double.IsNaN(x)) return double.NaN; 229 214 else if (x.IsAlmost(0.0)) return double.NaN; … … 232 217 } 233 218 case OpCodes.Dawson: { 234 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);219 var x = Evaluate(dataset, ref row, 
state); 235 220 if (double.IsNaN(x)) return double.NaN; 236 221 return alglib.dawsonintegral(x); 237 222 } 238 223 case OpCodes.ExponentialIntegralEi: { 239 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);224 var x = Evaluate(dataset, ref row, state); 240 225 if (double.IsNaN(x)) return double.NaN; 241 226 return alglib.exponentialintegralei(x); … … 243 228 case OpCodes.SineIntegral: { 244 229 double si, ci; 245 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);230 var x = Evaluate(dataset, ref row, state); 246 231 if (double.IsNaN(x)) return double.NaN; 247 232 else { … … 252 237 case OpCodes.CosineIntegral: { 253 238 double si, ci; 254 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);239 var x = Evaluate(dataset, ref row, state); 255 240 if (double.IsNaN(x)) return double.NaN; 256 241 else { … … 261 246 case OpCodes.HyperbolicSineIntegral: { 262 247 double shi, chi; 263 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);248 var x = Evaluate(dataset, ref row, state); 264 249 if (double.IsNaN(x)) return double.NaN; 265 250 else { … … 270 255 case OpCodes.HyperbolicCosineIntegral: { 271 256 double shi, chi; 272 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);257 var x = Evaluate(dataset, ref row, state); 273 258 if (double.IsNaN(x)) return double.NaN; 274 259 else { … … 279 264 case OpCodes.FresnelCosineIntegral: { 280 265 double c = 0, s = 0; 281 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);266 var x = Evaluate(dataset, ref row, state); 282 267 if (double.IsNaN(x)) return double.NaN; 283 268 else { … … 288 273 case OpCodes.FresnelSineIntegral: { 289 274 double c = 0, s = 0; 290 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);275 var x = Evaluate(dataset, ref row, state); 291 276 if (double.IsNaN(x)) return double.NaN; 292 277 else { … … 297 282 case OpCodes.AiryA: { 298 283 double ai, aip, bi, bip; 299 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);284 var x = Evaluate(dataset, ref row, state); 300 285 if (double.IsNaN(x)) return double.NaN; 301 286 else { … … 306 291 case OpCodes.AiryB: { 307 292 double ai, aip, bi, bip; 308 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);293 var x = Evaluate(dataset, ref row, state); 309 294 if (double.IsNaN(x)) return double.NaN; 310 295 else { … … 314 299 } 315 300 case OpCodes.Norm: { 316 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);301 var x = Evaluate(dataset, ref row, state); 317 302 if (double.IsNaN(x)) return double.NaN; 318 303 else return alglib.normaldistribution(x); 319 304 } 320 305 case OpCodes.Erf: { 321 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);306 var x = Evaluate(dataset, ref row, state); 322 307 if (double.IsNaN(x)) return double.NaN; 323 308 else return alglib.errorfunction(x); 324 309 } 325 310 case OpCodes.Bessel: { 326 var x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);311 var x = Evaluate(dataset, ref row, state); 327 312 if (double.IsNaN(x)) return double.NaN; 328 313 else return alglib.besseli0(x); 329 314 } 330 315 case OpCodes.IfThenElse: { 331 double condition = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);316 double condition = Evaluate(dataset, ref row, state); 332 317 double result; 333 318 
if (condition > 0.0) { 334 result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues); state.SkipInstructions();319 result = Evaluate(dataset, ref row, state); state.SkipInstructions(); 335 320 } else { 336 state.SkipInstructions(); result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);321 state.SkipInstructions(); result = Evaluate(dataset, ref row, state); 337 322 } 338 323 return result; 339 324 } 340 325 case OpCodes.AND: { 341 double result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);342 for (int i = 1; i < currentInstr.nArguments; i++) { 343 if (result > 0.0) result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);326 double result = Evaluate(dataset, ref row, state); 327 for (int i = 1; i < currentInstr.nArguments; i++) { 328 if (result > 0.0) result = Evaluate(dataset, ref row, state); 344 329 else { 345 330 state.SkipInstructions(); … … 349 334 } 350 335 case OpCodes.OR: { 351 double result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);352 for (int i = 1; i < currentInstr.nArguments; i++) { 353 if (result <= 0.0) result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);336 double result = Evaluate(dataset, ref row, state); 337 for (int i = 1; i < currentInstr.nArguments; i++) { 338 if (result <= 0.0) result = Evaluate(dataset, ref row, state); 354 339 else { 355 340 state.SkipInstructions(); … … 359 344 } 360 345 case OpCodes.NOT: { 361 return Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues) > 0.0 ? -1.0 : 1.0;346 return Evaluate(dataset, ref row, state) > 0.0 ? -1.0 : 1.0; 362 347 } 363 348 case OpCodes.GT: { 364 double x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);365 double y = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);349 double x = Evaluate(dataset, ref row, state); 350 double y = Evaluate(dataset, ref row, state); 366 351 if (x > y) return 1.0; 367 352 else return -1.0; 368 353 } 369 354 case OpCodes.LT: { 370 double x = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);371 double y = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);355 double x = Evaluate(dataset, ref row, state); 356 double y = Evaluate(dataset, ref row, state); 372 357 if (x < y) return 1.0; 373 358 else return -1.0; … … 376 361 var timeLagTreeNode = (LaggedTreeNode)currentInstr.dynamicNode; 377 362 row += timeLagTreeNode.Lag; 378 double result = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);363 double result = Evaluate(dataset, ref row, state); 379 364 row -= timeLagTreeNode.Lag; 380 365 return result; … … 386 371 for (int i = 0; i < Math.Abs(timeLagTreeNode.Lag); i++) { 387 372 row += Math.Sign(timeLagTreeNode.Lag); 388 sum += Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);373 sum += Evaluate(dataset, ref row, state); 389 374 state.ProgramCounter = savedPc; 390 375 } 391 376 row -= timeLagTreeNode.Lag; 392 sum += Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);377 sum += Evaluate(dataset, ref row, state); 393 378 return sum; 394 379 } … … 400 385 case OpCodes.Derivative: { 401 386 int savedPc = state.ProgramCounter; 402 double f_0 = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues); row--;387 double f_0 = Evaluate(dataset, ref row, state); row--; 403 388 state.ProgramCounter = savedPc; 404 double f_1 
= Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues); row -= 2;389 double f_1 = Evaluate(dataset, ref row, state); row -= 2; 405 390 state.ProgramCounter = savedPc; 406 double f_3 = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues); row--;391 double f_3 = Evaluate(dataset, ref row, state); row--; 407 392 state.ProgramCounter = savedPc; 408 double f_4 = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);393 double f_4 = Evaluate(dataset, ref row, state); 409 394 row += 4; 410 395 … … 415 400 double[] argValues = new double[currentInstr.nArguments]; 416 401 for (int i = 0; i < currentInstr.nArguments; i++) { 417 argValues[i] = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);402 argValues[i] = Evaluate(dataset, ref row, state); 418 403 } 419 404 // push on argument values on stack … … 425 410 state.ProgramCounter = (ushort)currentInstr.iArg0; 426 411 // evaluate the function 427 double v = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);412 double v = Evaluate(dataset, ref row, state); 428 413 429 414 // delete the stack frame … … 440 425 if (row < 0 || row >= dataset.Rows) return double.NaN; 441 426 var variableTreeNode = (VariableTreeNode)currentInstr.dynamicNode; 442 if (row <= lastObservedRow || !cachedPrognosedValues.ContainsKey(variableTreeNode.VariableName)) return ((IList<double>)currentInstr.iArg0)[row] * variableTreeNode.Weight; 443 else return cachedPrognosedValues[variableTreeNode.VariableName][row - lastObservedRow - 1] * variableTreeNode.Weight; 427 return ((IList<double>)currentInstr.iArg0)[row] * variableTreeNode.Weight; 444 428 } 445 429 case OpCodes.LagVariable: { 446 430 var laggedVariableTreeNode = (LaggedVariableTreeNode)currentInstr.dynamicNode; 447 431 int actualRow = row + laggedVariableTreeNode.Lag; 448 if (actualRow < 0 || actualRow >= dataset.Rows) 449 return double.NaN; 450 if (actualRow <= lastObservedRow || !cachedPrognosedValues.ContainsKey(laggedVariableTreeNode.VariableName)) return ((IList<double>)currentInstr.iArg0)[actualRow] * laggedVariableTreeNode.Weight; 451 else return cachedPrognosedValues[laggedVariableTreeNode.VariableName][actualRow - lastObservedRow - 1] * laggedVariableTreeNode.Weight; 432 if (actualRow < 0 || actualRow >= dataset.Rows) return double.NaN; 433 return ((IList<double>)currentInstr.iArg0)[actualRow] * laggedVariableTreeNode.Weight; 452 434 } 453 435 case OpCodes.Constant: { … … 459 441 //to determine the relative amounts of the true and false branch see http://en.wikipedia.org/wiki/Logistic_function 460 442 case OpCodes.VariableCondition: { 461 if (row < 0 || row >= dataset.Rows) 462 return double.NaN; 443 if (row < 0 || row >= dataset.Rows) return double.NaN; 463 444 var variableConditionTreeNode = (VariableConditionTreeNode)currentInstr.dynamicNode; 464 double variableValue; 465 if (row <= lastObservedRow || !cachedPrognosedValues.ContainsKey(variableConditionTreeNode.VariableName)) 466 variableValue = ((IList<double>)currentInstr.iArg0)[row]; 467 else 468 variableValue = cachedPrognosedValues[variableConditionTreeNode.VariableName][row - lastObservedRow - 1]; 445 double variableValue = ((IList<double>)currentInstr.iArg0)[row]; 469 446 470 447 double x = variableValue - variableConditionTreeNode.Threshold; 471 448 double p = 1 / (1 + Math.Exp(-variableConditionTreeNode.Slope * x)); 472 449 473 double trueBranch = Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);474 double falseBranch = 
Evaluate(dataset, ref row, lastObservedRow, state, cachedPrognosedValues);450 double trueBranch = Evaluate(dataset, ref row, state); 451 double falseBranch = Evaluate(dataset, ref row, state); 475 452 476 453 return trueBranch * p + falseBranch * (1 - p); -
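Note: the interpreter evaluates the compiled code recursively in prefix order; every call to Evaluate consumes one instruction and then evaluates as many sub-expressions as nArguments declares, which is why the lastObservedRow and cached-prognosis bookkeeping of the old time-series variant could be dropped. A condensed, fragment-level sketch of that pattern for a few opcodes (the real Evaluate handles the full opcode set):

// Sketch of prefix evaluation over the instruction stream; only Add, Mul and
// Constant are shown here.
double EvaluateSketch(Dataset dataset, ref int row, InterpreterState state) {
  Instruction instr = state.NextInstruction();
  switch (instr.opCode) {
    case OpCodes.Add: {
        double s = EvaluateSketch(dataset, ref row, state);
        for (int i = 1; i < instr.nArguments; i++) s += EvaluateSketch(dataset, ref row, state);
        return s;
      }
    case OpCodes.Mul: {
        double p = EvaluateSketch(dataset, ref row, state);
        for (int i = 1; i < instr.nArguments; i++) p *= EvaluateSketch(dataset, ref row, state);
        return p;
      }
    case OpCodes.Constant: {
        var constNode = (ConstantTreeNode)instr.dynamicNode;
        return constNode.Value;
      }
    default: throw new NotSupportedException();
  }
}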
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Views/3.4/HeuristicLab.Problems.DataAnalysis.Views-3.4.csproj
r7886 r7989
312 312 </BootstrapperPackage>
313 313 </ItemGroup>
314 <ItemGroup>
315 <Folder Include="obj\" />
316 </ItemGroup>
314 <ItemGroup />
317 315 <ItemGroup>
318 316 <ProjectReference Include="..\..\HeuristicLab.Problems.DataAnalysis\3.4\HeuristicLab.Problems.DataAnalysis-3.4.csproj">
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Views/3.4/Solution Views/TimeSeriesPrognosisSolutionView.cs
r7183 r7989 22 22 using System.Windows.Forms; 23 23 using HeuristicLab.Core; 24 using HeuristicLab.Data;25 24 using HeuristicLab.MainForm; 26 25 27 26 namespace HeuristicLab.Problems.DataAnalysis.Views { 28 27 [View("TimeSeriesPrognosisnSolution View")] 29 [Content(typeof(TimeSeriesPrognosisSolution Base), false)]28 [Content(typeof(TimeSeriesPrognosisSolution), false)] 30 29 public partial class TimeSeriesPrognosisSolutionView : DataAnalysisSolutionView { 31 30 public TimeSeriesPrognosisSolutionView() { … … 33 32 } 34 33 35 public new TimeSeriesPrognosisSolution BaseContent {36 get { return (TimeSeriesPrognosisSolution Base)base.Content; }34 public new TimeSeriesPrognosisSolution Content { 35 get { return (TimeSeriesPrognosisSolution)base.Content; } 37 36 set { base.Content = value; } 38 37 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Views/3.4/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionLineChartView.cs
r7463 r7989 58 58 } 59 59 60 private void UpdateTargetVariables() {61 // populate combobox62 targetVariableComboBox.Items.Clear();63 if (Content != null) {64 if (testPrognosisStart < Content.ProblemData.TestPartition.Start || testPrognosisStart >= Content.ProblemData.TestPartition.End) {65 testPrognosisStart = Content.ProblemData.TestPartition.Start;66 }67 foreach (var targetVariable in Content.ProblemData.TargetVariables)68 targetVariableComboBox.Items.Add(targetVariable);69 70 targetVariableComboBox.SelectedIndex = 0;71 }72 }73 74 75 76 60 private void RedrawChart() { 77 61 this.chart.Series.Clear(); … … 79 63 this.chart.ChartAreas[0].AxisX.Minimum = 0; 80 64 this.chart.ChartAreas[0].AxisX.Maximum = Content.ProblemData.Dataset.Rows - 1; 81 string targetVariable = (string)targetVariableComboBox.SelectedItem; 82 int varIndex = Content.ProblemData.TargetVariables.ToList().IndexOf(targetVariable); 65 string targetVariable = Content.ProblemData.TargetVariable; 83 66 84 67 this.chart.Series.Add(TARGETVARIABLE_SERIES_NAME); … … 94 77 this.chart.Series[PROGNOSEDVALUES_TRAINING_SERIES_NAME].Points 95 78 .DataBindXY(Content.ProblemData.TrainingIndizes.ToArray(), 96 Content. PrognosedTrainingValues.SelectMany(x => x).Skip(varIndex).TakeEvery(Content.ProblemData.TargetVariables.Count()).ToArray());79 Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes.Take(1), Content.ProblemData.TrainingPartition.Size).SelectMany(x => x).ToArray()); 97 80 } else { 98 81 this.chart.Series[PROGNOSEDVALUES_TRAINING_SERIES_NAME].Points 99 82 .DataBindXY(Content.ProblemData.TrainingIndizes.ToArray(), 100 Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes, 1).SelectMany(x => x .Single()).Skip(varIndex).TakeEvery(Content.ProblemData.TargetVariables.Count()).ToArray());83 Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes, 1).SelectMany(x => x).ToArray()); 101 84 } 102 85 this.chart.Series[PROGNOSEDVALUES_TRAINING_SERIES_NAME].Tag = Content; … … 110 93 this.chart.Series[PROGNOSEDVALUES_TEST_SERIES_NAME].ChartType = SeriesChartType.FastLine; 111 94 if (prognosedValuesCheckbox.Checked) { 112 int offsetToStart = testPrognosisStart - Content.ProblemData.TestPartition.Start;113 95 this.chart.Series[PROGNOSEDVALUES_TEST_SERIES_NAME].Points 114 .DataBindXY(Content.ProblemData.TestIndizes.Skip(offsetToStart).ToArray(), 115 Content.GetPrognosedValues(Enumerable.Range(testPrognosisStart, 1), Content.ProblemData.TestPartition.End - testPrognosisStart) 116 .SelectMany(x => x.SelectMany(y => y)) 117 .Skip(varIndex) 118 .TakeEvery(Content.ProblemData.TargetVariables.Count()) 119 .ToArray()); 96 .DataBindXY(Content.ProblemData.TestIndizes.ToArray(), 97 Content.GetPrognosedValues(Content.ProblemData.TestIndizes.Take(1), Content.ProblemData.TestPartition.Size).SelectMany(x => x).ToArray()); 120 98 } else { 121 99 this.chart.Series[PROGNOSEDVALUES_TEST_SERIES_NAME].Points 122 100 .DataBindXY(Content.ProblemData.TestIndizes.ToArray(), 123 Content.GetPrognosedValues(Content.ProblemData.TestIndizes, 1) 124 .SelectMany(x => x.Single()) 125 .Skip(varIndex) 126 .TakeEvery(Content.ProblemData.TargetVariables.Count()) 127 .ToArray()); 101 Content.GetPrognosedValues(Content.ProblemData.TestIndizes, 1).SelectMany(x => x).ToArray()); 128 102 } 129 103 this.chart.Series[PROGNOSEDVALUES_TEST_SERIES_NAME].Tag = Content; … … 159 133 protected override void OnContentChanged() { 160 134 base.OnContentChanged(); 161 UpdateTargetVariables();135 RedrawChart(); 162 136 } 163 137 164 138 private void 
Content_ProblemDataChanged(object sender, EventArgs e) { 165 UpdateTargetVariables();139 RedrawChart(); 166 140 } 167 141 private void Content_ModelChanged(object sender, EventArgs e) { … … 212 186 this.CreateAndAddStripLine("Training and Test", start, row, Color.FromArgb(40, Color.Green), Color.FromArgb(40, Color.Red), ChartHatchStyle.WideUpwardDiagonal); 213 187 break; 214 default: 215 // should not happen 216 break; 188 default: throw new NotSupportedException(); 217 189 } 218 190 curAttr = attr[row]; … … 242 214 } 243 215 } else if (Content != null) { 244 string targetVariable = (string)targetVariableComboBox.SelectedItem; 245 int varIndex = Content.ProblemData.TargetVariables.ToList().IndexOf(targetVariable); 246 216 string targetVariable = Content.ProblemData.TargetVariable; 247 217 248 218 IEnumerable<int> indizes = null; 249 219 IEnumerable<double> predictedValues = null; 220 250 221 switch (series.Name) { 251 222 case PROGNOSEDVALUES_TRAINING_SERIES_NAME: 252 223 indizes = Content.ProblemData.TrainingIndizes.ToArray(); 253 predictedValues = 254 Content.PrognosedTrainingValues.SelectMany(x => x).Skip(varIndex).TakeEvery( 255 Content.ProblemData.TargetVariables.Count()).ToArray(); 224 predictedValues = Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes.Take(1), Content.ProblemData.TrainingPartition.Size).First(); 256 225 break; 257 226 case PROGNOSEDVALUES_TEST_SERIES_NAME: 258 227 testPrognosisStart = Content.ProblemData.TestPartition.Start; 259 228 indizes = Content.ProblemData.TestIndizes.ToArray(); 260 predictedValues = Content.PrognosedTestValues.SelectMany(x => x).Skip(varIndex).TakeEvery( 261 Content.ProblemData.TargetVariables.Count()).ToArray(); 229 predictedValues = Content.GetPrognosedValues(Content.ProblemData.TestIndizes.Take(1), Content.ProblemData.TestPartition.Size).First(); 262 230 break; 263 231 } -
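Note: the line chart now requests a single multi-step forecast that starts at the first row of a partition and spans the whole partition, then flattens it for data binding; the per-target-variable combo box handling was removed along with the multi-target API. A fragment-level sketch of building the training series exactly as the view does:

// Sketch: one prognosis starting at the first training row with the training
// partition size as horizon, flattened to a double[] for chart data binding.
double[] TrainingForecast(TimeSeriesPrognosisSolution solution) {
  var problemData = solution.ProblemData;
  return solution.GetPrognosedValues(problemData.TrainingIndizes.Take(1), problemData.TrainingPartition.Size)
                 .SelectMany(x => x)
                 .ToArray();
}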
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Views/3.4/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionPrognosedValuesView.cs
r7154 r7989 82 82 83 83 private void UpdateEstimatedValues() { 84 if (InvokeRequired) Invoke((Action)UpdateEstimatedValues); 85 else { 86 StringMatrix matrix = null; 87 List<string> columnNames = new List<string>(); 88 if (Content != null) { 89 columnNames.Add("Id"); 84 if (InvokeRequired) { 85 Invoke((Action)UpdateEstimatedValues); 86 return; 87 } 88 if (Content == null) return; 90 89 91 string[,] values = new string[Content.ProblemData.Dataset.Rows, 1 + 3 * Content.ProblemData.TargetVariables.Count()]; 92 foreach (var row in Enumerable.Range(0, Content.ProblemData.Dataset.Rows)) 93 values[row, 0] = row.ToString(); 90 var targetVariable = Content.ProblemData.TargetVariable; 91 StringMatrix matrix = null; 92 List<string> columnNames = new List<string>(); 93 columnNames.Add("Id"); 94 94 95 var allPrognosedTraining = Content.PrognosedTrainingValues.SelectMany(x=>x).ToArray(); 96 var allPrognosedTest = Content.PrognosedTestValues.SelectMany(x => x).ToArray(); 97 98 int i = 0; 99 int targetVariableIndex = 0; 100 foreach (var targetVariable in Content.ProblemData.TargetVariables) { 101 var prognosedTraining = 102 allPrognosedTraining.Skip(targetVariableIndex).TakeEvery(Content.ProblemData.TargetVariables.Count()); 103 var prognosedTest = 104 allPrognosedTest.Skip(targetVariableIndex).TakeEvery(Content.ProblemData.TargetVariables.Count()); 105 106 double[] target = Content.ProblemData.Dataset.GetDoubleValues(targetVariable).ToArray(); 107 108 var prognosedTrainingEnumerator = prognosedTraining.GetEnumerator(); 109 foreach (var row in Content.ProblemData.TrainingIndizes) { 110 prognosedTrainingEnumerator.MoveNext(); 111 values[row, i + 2] = prognosedTrainingEnumerator.Current.ToString(); 112 } 113 114 var prognosedTestEnumerator = prognosedTest.GetEnumerator(); 115 foreach (var row in Content.ProblemData.TestIndizes) { 116 prognosedTestEnumerator.MoveNext(); 117 values[row, i + 3] = prognosedTestEnumerator.Current.ToString(); 118 } 119 120 foreach (var row in Enumerable.Range(0, Content.ProblemData.Dataset.Rows)) { 121 values[row, i + 1] = target[row].ToString(); 122 } 123 124 columnNames.AddRange(new string[] { targetVariable + "(actual)", targetVariable + "(training)", targetVariable + "(test)" }); 125 i += 3; 126 targetVariableIndex++; 127 } // foreach 95 string[,] values = new string[Content.ProblemData.Dataset.Rows, 4]; 96 var prognosedTraining = Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes.Take(1), Content.ProblemData.TrainingPartition.Size).First(); 97 var prognosedTest = Content.GetPrognosedValues(Content.ProblemData.TrainingIndizes.Take(1), Content.ProblemData.TestPartition.Size).First(); 98 double[] target = Content.ProblemData.Dataset.GetDoubleValues(targetVariable).ToArray(); 128 99 129 100 130 matrix = new StringMatrix(values); 131 matrix.ColumnNames = columnNames.ToArray(); 132 matrix.SortableView = true; 101 foreach (var row in Enumerable.Range(0, Content.ProblemData.Dataset.Rows)) { 102 values[row, 0] = row.ToString(); 103 values[row, 1] = target[row].ToString(); 104 } 133 105 134 } // if 135 matrixView.Content = matrix; 106 var rowsEnumerator = Content.ProblemData.TrainingIndizes.GetEnumerator(); 107 var prognosisEnumerator = prognosedTraining.GetEnumerator(); 108 while (rowsEnumerator.MoveNext() & prognosisEnumerator.MoveNext()) { 109 var row = rowsEnumerator.Current; 110 var prognosis = prognosisEnumerator.Current; 111 values[row, 2] = prognosis.ToString(); 136 112 } 113 114 rowsEnumerator = Content.ProblemData.TestIndizes.GetEnumerator(); 115 
prognosisEnumerator = prognosedTest.GetEnumerator(); 116 while (rowsEnumerator.MoveNext() & prognosisEnumerator.MoveNext()) { 117 var row = rowsEnumerator.Current; 118 var prognosis = prognosisEnumerator.Current; 119 values[row, 3] = prognosis.ToString(); 120 } 121 122 columnNames.AddRange(new string[] { targetVariable + "(actual)", targetVariable + "(training)", targetVariable + "(test)" }); 123 matrix = new StringMatrix(values); 124 matrix.ColumnNames = columnNames.ToArray(); 125 matrix.SortableView = true; 137 126 } 138 127 #endregion -
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Dataset.cs
r7842 r7989
193 193 if (values == null) throw new ArgumentException("The varialbe " + variableName + " is not a double variable.");
194 194
195 foreach (int index in rows)
196 yield return values[index];
195 return rows.Select(index => values[index]);
197 196
198 197
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/DatasetExtensions.cs
r7154 r7989
20 20 #endregion
21 21
22 using System;
23 22 using System.Collections.Generic;
24 using System.Linq;
25 using System.Text;
26 using HeuristicLab.PluginInfrastructure;
27 23
28 24 namespace HeuristicLab.Problems.DataAnalysis {
29 25 public static class DatasetExtensions {
30 public static IEnumerable<double> GetVectorEnumerable(this Dataset ds, IEnumerable<string> targetVariables, IEnumerable<int> rows) {
31 var vars = targetVariables.ToArray();
32 var values = (from var in vars
33 select ds.GetReadOnlyDoubleValues(var))
34 .ToArray();
35
36 foreach (var r in rows) {
37 for (int i = 0; i < values.Length; i++)
38 yield return values[i][r];
39 }
40 }
41
42 26 public static IEnumerable<T> TakeEvery<T>(this IEnumerable<T> xs, int nth) {
43 27 int i = 0;
-
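Note: with GetVectorEnumerable removed, TakeEvery is the remaining helper in DatasetExtensions. Assuming the usual modulo-based implementation hinted at by the retained lines, it keeps every nth element starting at index 0; it was previously used to de-interleave per-target values before the switch to a single target variable. A short usage sketch:

// Sketch: TakeEvery(3) keeps the elements at indices 0, 3, 6, ...
var xs = new[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0 };
double[] everyThird = xs.TakeEvery(3).ToArray(); // { 1.0, 4.0, 7.0 }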
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisProblemData.cs
r7886 r7989 33 33 [Item("TimeSeriesPrognosisProblemData", "Represents an item containing all data defining a time series prognosis problem.")] 34 34 public class TimeSeriesPrognosisProblemData : DataAnalysisProblemData, ITimeSeriesPrognosisProblemData { 35 protected const string TargetVariable sParameterName = "TargetVariables";35 protected const string TargetVariableParameterName = "TargetVariable"; 36 36 37 37 #region default data … … 1540 1540 private static readonly Dataset defaultDataset; 1541 1541 private static readonly IEnumerable<string> defaultAllowedInputVariables; 1542 private static readonly string [] defaultTargetVariables;1542 private static readonly string defaultTargetVariable; 1543 1543 1544 1544 private static readonly TimeSeriesPrognosisProblemData emptyProblemData; … … 1551 1551 defaultDataset.Name = "Mackey-Glass (t=17) Time Series Benchmark Dataset"; 1552 1552 defaultAllowedInputVariables = new List<string>() { "x" }; 1553 defaultTargetVariable s = new string[] { "x" };1553 defaultTargetVariable = "x"; 1554 1554 1555 1555 var problemData = new TimeSeriesPrognosisProblemData(); … … 1563 1563 problemData.Parameters.Add(new FixedValueParameter<IntRange>(TrainingPartitionParameterName, "", (IntRange)new IntRange(0, 0).AsReadOnly())); 1564 1564 problemData.Parameters.Add(new FixedValueParameter<IntRange>(TestPartitionParameterName, "", (IntRange)new IntRange(0, 0).AsReadOnly())); 1565 problemData.Parameters.Add(new ConstrainedValueParameter<StringValue>(TargetVariable sParameterName, new ItemSet<StringValue>()));1565 problemData.Parameters.Add(new ConstrainedValueParameter<StringValue>(TargetVariableParameterName, new ItemSet<StringValue>())); 1566 1566 emptyProblemData = problemData; 1567 1567 } 1568 1568 #endregion 1569 1569 1570 public ValueParameter<CheckedItemList<StringValue>> TargetVariablesParameter {1571 get { return ( ValueParameter<CheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }1570 public ConstrainedValueParameter<StringValue> TargetVariableParameter { 1571 get { return (ConstrainedValueParameter<StringValue>)Parameters[TargetVariableParameterName]; } 1572 1572 } 1573 public IEnumerable<string> TargetVariables{1574 get { return TargetVariable sParameter.Value.CheckedItems.Select(x => x.Value.Value); }1573 public string TargetVariable { 1574 get { return TargetVariableParameter.Value.Value; } 1575 1575 } 1576 1576 … … 1592 1592 1593 1593 public TimeSeriesPrognosisProblemData() 1594 : this(defaultDataset, defaultAllowedInputVariables, defaultTargetVariable s) {1594 : this(defaultDataset, defaultAllowedInputVariables, defaultTargetVariable) { 1595 1595 } 1596 1596 1597 public TimeSeriesPrognosisProblemData(Dataset dataset, IEnumerable<string> allowedInputVariables, IEnumerable<string> targetVariables)1597 public TimeSeriesPrognosisProblemData(Dataset dataset, IEnumerable<string> allowedInputVariables, string targetVariable) 1598 1598 : base(dataset, allowedInputVariables) { 1599 1599 var variables = InputVariables.Select(x => x.AsReadOnly()).ToList(); 1600 var targetVariablesList = new CheckedItemList<StringValue>(variables); 1601 foreach (var targetVar in targetVariables) { 1602 targetVariablesList.SetItemCheckedState(targetVariablesList.Single(x => x.Value == targetVar), true); 1603 } 1604 Parameters.Add(new FixedValueParameter<CheckedItemList<StringValue>>(TargetVariablesParameterName, targetVariablesList)); 1600 Parameters.Add(new ConstrainedValueParameter<StringValue>(TargetVariableParameterName, new ItemSet<StringValue>(variables), 
variables.First(x => x.Value == targetVariable))); 1605 1601 RegisterParameterEvents(); 1606 1602 } 1607 1603 1608 1604 private void RegisterParameterEvents() { 1609 TargetVariable sParameter.Value.CheckedItemsChanged += TargetVariableParameter_ValueChanged;1605 TargetVariableParameter.ValueChanged += new EventHandler(TargetVariableParameter_ValueChanged); 1610 1606 } 1611 1612 1607 private void TargetVariableParameter_ValueChanged(object sender, EventArgs e) { 1613 1608 OnChanged(); -
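Note: TimeSeriesPrognosisProblemData now exposes a single TargetVariable through a ConstrainedValueParameter instead of a checked list, and the constructor accordingly takes one target variable name. A fragment-level sketch against the new signature (dataset is a placeholder Dataset instance; "x" matches the Mackey-Glass default):

// Sketch: single-target problem data, matching the new constructor signature.
var problemData = new TimeSeriesPrognosisProblemData(dataset, new[] { "x" }, "x");
string target = problemData.TargetVariable; // "x"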
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisSolution.cs
r7160 r7989 21 21 22 22 using System.Collections.Generic; 23 using System.Linq;24 23 using HeuristicLab.Common; 25 24 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; … … 33 32 34 33 [StorableConstructor] 35 protected TimeSeriesPrognosisSolution(bool deserializing) 36 : base(deserializing) { 37 } 38 protected TimeSeriesPrognosisSolution(TimeSeriesPrognosisSolution original, Cloner cloner) 39 : base(original, cloner) { 40 } 41 protected TimeSeriesPrognosisSolution(ITimeSeriesPrognosisModel model, ITimeSeriesPrognosisProblemData problemData) 42 : base(model, problemData) { 43 } 34 protected TimeSeriesPrognosisSolution(bool deserializing) : base(deserializing) { } 35 protected TimeSeriesPrognosisSolution(TimeSeriesPrognosisSolution original, Cloner cloner) : base(original, cloner) { } 36 protected TimeSeriesPrognosisSolution(ITimeSeriesPrognosisModel model, ITimeSeriesPrognosisProblemData problemData) : base(model, problemData) { } 44 37 45 38 protected override void RecalculateResults() { … … 47 40 } 48 41 49 public override IEnumerable<IEnumerable<double>> PrognosedTrainingValues { 50 get { 51 return GetPrognosedValues(ProblemData.TrainingIndizes.Take(1), 52 ProblemData.TrainingPartition.End - ProblemData.TrainingPartition.Start) 53 .First(); 54 } 55 } 56 public override IEnumerable<IEnumerable<double>> PrognosedTestValues { 57 get { 58 return GetPrognosedValues(ProblemData.TestIndizes.Take(1), 59 ProblemData.TestPartition.End - ProblemData.TestPartition.Start) 60 .First(); 61 } 62 } 63 public override IEnumerable<IEnumerable<IEnumerable<double>>> GetPrognosedValues(IEnumerable<int> rows, int horizon) { 42 public override IEnumerable<IEnumerable<double>> GetPrognosedValues(IEnumerable<int> rows, int horizon) { 64 43 return Model.GetPrognosedValues(ProblemData.Dataset, rows, horizon); 65 }66 67 protected override void OnProblemDataChanged() {68 base.OnProblemDataChanged();69 }70 71 protected override void OnModelChanged() {72 base.OnModelChanged();73 44 } 74 45 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionBase.cs
r7194 r7989 21 21 22 22 using System; 23 using System.Collections.Concurrent;24 23 using System.Collections.Generic; 25 24 using System.Linq; … … 75 74 } 76 75 77 public abstract IEnumerable<IEnumerable<double>> PrognosedTrainingValues { get; } 78 public abstract IEnumerable<IEnumerable<double>> PrognosedTestValues { get; } 79 public abstract IEnumerable<IEnumerable<IEnumerable<double>>> GetPrognosedValues(IEnumerable<int> rows, int horizon); 76 public abstract IEnumerable<IEnumerable<double>> GetPrognosedValues(IEnumerable<int> rows, int horizon); 80 77 81 78 #region Results 82 public double []TrainingMeanSquaredError {83 get { return ((Double Array)this[TrainingMeanSquaredErrorResultName].Value).ToArray(); }84 private set { this[TrainingMeanSquaredErrorResultName].Value = new DoubleArray(value); }85 } 86 public double []TestMeanSquaredError {87 get { return ((Double Array)this[TestMeanSquaredErrorResultName].Value).ToArray(); }88 private set { this[TestMeanSquaredErrorResultName].Value = new DoubleArray(value); }89 } 90 public double []TrainingMeanAbsoluteError {91 get { return ((Double Array)this[TrainingMeanAbsoluteErrorResultName].Value).ToArray(); }92 private set { this[TrainingMeanAbsoluteErrorResultName].Value = new DoubleArray(value); }93 } 94 public double []TestMeanAbsoluteError {95 get { return ((Double Array)this[TestMeanAbsoluteErrorResultName].Value).ToArray(); }96 private set { this[TestMeanAbsoluteErrorResultName].Value = new DoubleArray(value); }97 } 98 public double []TrainingRSquared {99 get { return ((Double Array)this[TrainingSquaredCorrelationResultName].Value).ToArray(); }100 private set { this[TrainingSquaredCorrelationResultName].Value = new DoubleArray(value); }101 } 102 public double []TestRSquared {103 get { return ((Double Array)this[TestSquaredCorrelationResultName].Value).ToArray(); }104 private set { this[TestSquaredCorrelationResultName].Value = new DoubleArray(value); }105 } 106 public double []TrainingRelativeError {107 get { return ((Double Array)this[TrainingRelativeErrorResultName].Value).ToArray(); }108 private set { this[TrainingRelativeErrorResultName].Value = new DoubleArray(value); }109 } 110 public double []TestRelativeError {111 get { return ((Double Array)this[TestRelativeErrorResultName].Value).ToArray(); }112 private set { this[TestRelativeErrorResultName].Value = new DoubleArray(value); }113 } 114 public double []TrainingNormalizedMeanSquaredError {115 get { return ((Double Array)this[TrainingNormalizedMeanSquaredErrorResultName].Value).ToArray(); }116 private set { this[TrainingNormalizedMeanSquaredErrorResultName].Value = new DoubleArray(value); }117 } 118 public double []TestNormalizedMeanSquaredError {119 get { return ((Double Array)this[TestNormalizedMeanSquaredErrorResultName].Value).ToArray(); }120 private set { this[TestNormalizedMeanSquaredErrorResultName].Value = new DoubleArray(value); }121 } 122 public double []TrainingDirectionalSymmetry {123 get { return ((Double Array)this[TrainingDirectionalSymmetryResultName].Value).ToArray(); }124 private set { this[TrainingDirectionalSymmetryResultName].Value = new DoubleArray(value); }125 } 126 public double []TestDirectionalSymmetry {127 get { return ((Double Array)this[TestDirectionalSymmetryResultName].Value).ToArray(); }128 private set { this[TestDirectionalSymmetryResultName].Value = new DoubleArray(value); }129 } 130 public double []TrainingWeightedDirectionalSymmetry {131 get { return ((Double Array)this[TrainingWeightedDirectionalSymmetryResultName].Value).ToArray(); }132 private 
set { this[TrainingWeightedDirectionalSymmetryResultName].Value = new DoubleArray(value); }133 } 134 public double []TestWeightedDirectionalSymmetry {135 get { return ((Double Array)this[TestWeightedDirectionalSymmetryResultName].Value).ToArray(); }136 private set { this[TestWeightedDirectionalSymmetryResultName].Value = new DoubleArray(value); }137 } 138 public double []TrainingTheilsUStatisticLast {139 get { return ((Double Array)this[TrainingTheilsUStatisticLastResultName].Value).ToArray(); }140 private set { this[TrainingTheilsUStatisticLastResultName].Value = new DoubleArray(value); }141 } 142 public double []TestTheilsUStatisticLast {143 get { return ((Double Array)this[TestTheilsUStatisticLastResultName].Value).ToArray(); }144 private set { this[TestTheilsUStatisticLastResultName].Value = new DoubleArray(value); }145 } 146 public double []TrainingTheilsUStatisticMean {147 get { return ((Double Array)this[TrainingTheilsUStatisticMeanResultName].Value).ToArray(); }148 private set { this[TrainingTheilsUStatisticMeanResultName].Value = new DoubleArray(value); }149 } 150 public double []TestTheilsUStatisticMean {151 get { return ((Double Array)this[TestTheilsUStatisticMeanResultName].Value).ToArray(); }152 private set { this[TestTheilsUStatisticMeanResultName].Value = new DoubleArray(value); }153 } 154 public double []TrainingTheilsUStatisticMovingAverage {155 get { return ((Double Array)this[TrainingTheilsUStatisticMaResultName].Value).ToArray(); }156 private set { this[TrainingTheilsUStatisticMaResultName].Value = new DoubleArray(value); }157 } 158 public double []TestTheilsUStatisticMovingAverage {159 get { return ((Double Array)this[TestTheilsUStatisticMaResultName].Value).ToArray(); }160 private set { this[TestTheilsUStatisticMaResultName].Value = new DoubleArray(value); }79 public double TrainingMeanSquaredError { 80 get { return ((DoubleValue)this[TrainingMeanSquaredErrorResultName].Value).Value; } 81 private set { ((DoubleValue)this[TrainingMeanSquaredErrorResultName].Value).Value = value; } 82 } 83 public double TestMeanSquaredError { 84 get { return ((DoubleValue)this[TestMeanSquaredErrorResultName].Value).Value; } 85 private set { ((DoubleValue)this[TestMeanSquaredErrorResultName].Value).Value = value; } 86 } 87 public double TrainingMeanAbsoluteError { 88 get { return ((DoubleValue)this[TrainingMeanAbsoluteErrorResultName].Value).Value; } 89 private set { ((DoubleValue)this[TrainingMeanAbsoluteErrorResultName].Value).Value = value; } 90 } 91 public double TestMeanAbsoluteError { 92 get { return ((DoubleValue)this[TestMeanAbsoluteErrorResultName].Value).Value; } 93 private set { ((DoubleValue)this[TestMeanAbsoluteErrorResultName].Value).Value = value; } 94 } 95 public double TrainingRSquared { 96 get { return ((DoubleValue)this[TrainingSquaredCorrelationResultName].Value).Value; } 97 private set { ((DoubleValue)this[TrainingSquaredCorrelationResultName].Value).Value = value; } 98 } 99 public double TestRSquared { 100 get { return ((DoubleValue)this[TestSquaredCorrelationResultName].Value).Value; } 101 private set { ((DoubleValue)this[TestSquaredCorrelationResultName].Value).Value = value; } 102 } 103 public double TrainingRelativeError { 104 get { return ((DoubleValue)this[TrainingRelativeErrorResultName].Value).Value; } 105 private set { ((DoubleValue)this[TrainingRelativeErrorResultName].Value).Value = value; } 106 } 107 public double TestRelativeError { 108 get { return ((DoubleValue)this[TestRelativeErrorResultName].Value).Value; } 109 private set { 
((DoubleValue)this[TestRelativeErrorResultName].Value).Value = value; } 110 } 111 public double TrainingNormalizedMeanSquaredError { 112 get { return ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value; } 113 private set { ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value = value; } 114 } 115 public double TestNormalizedMeanSquaredError { 116 get { return ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value; } 117 private set { ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value = value; } 118 } 119 public double TrainingDirectionalSymmetry { 120 get { return ((DoubleValue)this[TrainingDirectionalSymmetryResultName].Value).Value; } 121 private set { ((DoubleValue)this[TrainingDirectionalSymmetryResultName].Value).Value = value; } 122 } 123 public double TestDirectionalSymmetry { 124 get { return ((DoubleValue)this[TestDirectionalSymmetryResultName].Value).Value; } 125 private set { ((DoubleValue)this[TestDirectionalSymmetryResultName].Value).Value = value; } 126 } 127 public double TrainingWeightedDirectionalSymmetry { 128 get { return ((DoubleValue)this[TrainingWeightedDirectionalSymmetryResultName].Value).Value; } 129 private set { ((DoubleValue)this[TrainingWeightedDirectionalSymmetryResultName].Value).Value = value; } 130 } 131 public double TestWeightedDirectionalSymmetry { 132 get { return ((DoubleValue)this[TestWeightedDirectionalSymmetryResultName].Value).Value; } 133 private set { ((DoubleValue)this[TestWeightedDirectionalSymmetryResultName].Value).Value = value; } 134 } 135 public double TrainingTheilsUStatisticLast { 136 get { return ((DoubleValue)this[TrainingTheilsUStatisticLastResultName].Value).Value; } 137 private set { ((DoubleValue)this[TrainingTheilsUStatisticLastResultName].Value).Value = value; } 138 } 139 public double TestTheilsUStatisticLast { 140 get { return ((DoubleValue)this[TestTheilsUStatisticLastResultName].Value).Value; } 141 private set { ((DoubleValue)this[TestTheilsUStatisticLastResultName].Value).Value = value; } 142 } 143 public double TrainingTheilsUStatisticMean { 144 get { return ((DoubleValue)this[TrainingTheilsUStatisticMeanResultName].Value).Value; } 145 private set { ((DoubleValue)this[TrainingTheilsUStatisticMeanResultName].Value).Value = value; } 146 } 147 public double TestTheilsUStatisticMean { 148 get { return ((DoubleValue)this[TestTheilsUStatisticMeanResultName].Value).Value; } 149 private set { ((DoubleValue)this[TestTheilsUStatisticMeanResultName].Value).Value = value; } 150 } 151 public double TrainingTheilsUStatisticMovingAverage { 152 get { return ((DoubleValue)this[TrainingTheilsUStatisticMaResultName].Value).Value; } 153 private set { ((DoubleValue)this[TrainingTheilsUStatisticMaResultName].Value).Value = value; } 154 } 155 public double TestTheilsUStatisticMovingAverage { 156 get { return ((DoubleValue)this[TestTheilsUStatisticMaResultName].Value).Value; } 157 private set { ((DoubleValue)this[TestTheilsUStatisticMaResultName].Value).Value = value; } 161 158 } 162 159 #endregion … … 170 167 protected TimeSeriesPrognosisSolutionBase(ITimeSeriesPrognosisModel model, ITimeSeriesPrognosisProblemData problemData) 171 168 : base(model, problemData) { 172 Add(new Result(TrainingMeanSquaredErrorResultName, "Mean of squared errors of the model on the training partition", new Double Array()));173 Add(new Result(TestMeanSquaredErrorResultName, "Mean of squared errors of the model on the test partition", new Double Array()));174 Add(new 
Result(TrainingMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the training partition", new DoubleArray()));
175 Add(new Result(TestMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the test partition", new DoubleArray()));
176 Add(new Result(TrainingSquaredCorrelationResultName, "Squared Pearson's correlation coefficient of the model output and the actual values on the training partition", new DoubleArray()));
177 Add(new Result(TestSquaredCorrelationResultName, "Squared Pearson's correlation coefficient of the model output and the actual values on the test partition", new DoubleArray()));
178 Add(new Result(TrainingRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the training partition", new DoubleArray()));
179 Add(new Result(TestRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the test partition", new DoubleArray()));
180 Add(new Result(TrainingNormalizedMeanSquaredErrorResultName, "Normalized mean of squared errors of the model on the training partition", new DoubleArray()));
181 Add(new Result(TestNormalizedMeanSquaredErrorResultName, "Normalized mean of squared errors of the model on the test partition", new DoubleArray()));
182 Add(new Result(TrainingDirectionalSymmetryResultName, "The average directional symmetry of the forecasts of the model on the training partition", new DoubleArray()));
183 Add(new Result(TestDirectionalSymmetryResultName, "The average directional symmetry of the forecasts of the model on the test partition", new DoubleArray()));
184 Add(new Result(TrainingWeightedDirectionalSymmetryResultName, "The average weighted directional symmetry of the forecasts of the model on the training partition", new DoubleArray()));
185 Add(new Result(TestWeightedDirectionalSymmetryResultName, "The average weighted directional symmetry of the forecasts of the model on the test partition", new DoubleArray()));
186 Add(new Result(TrainingTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the training partition", new DoubleArray()));
187 Add(new Result(TestTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the test partition", new DoubleArray()));
188 Add(new Result(TrainingTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the training partition", new DoubleArray()));
189 Add(new Result(TestTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the test partition", new DoubleArray()));
190 Add(new Result(TrainingTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the training partition", new DoubleArray()));
191 Add(new Result(TestTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the test partition", new DoubleArray()));
169 Add(new Result(TrainingMeanSquaredErrorResultName, "Mean of squared errors of the model on the training partition", new DoubleValue()));
170 Add(new Result(TestMeanSquaredErrorResultName, "Mean of squared errors of the model on the test partition", new DoubleValue()));
171 Add(new Result(TrainingMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the training partition", new DoubleValue()));
172 Add(new Result(TestMeanAbsoluteErrorResultName, "Mean of absolute errors of the model on the test partition", new DoubleValue()));
173 Add(new Result(TrainingSquaredCorrelationResultName, "Squared Pearson's correlation coefficient of the model output and the actual values on the training partition", new DoubleValue()));
174 Add(new Result(TestSquaredCorrelationResultName, "Squared Pearson's correlation coefficient of the model output and the actual values on the test partition", new DoubleValue()));
175 Add(new Result(TrainingRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the training partition", new DoubleValue()));
176 Add(new Result(TestRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the test partition", new DoubleValue()));
177 Add(new Result(TrainingNormalizedMeanSquaredErrorResultName, "Normalized mean of squared errors of the model on the training partition", new DoubleValue()));
178 Add(new Result(TestNormalizedMeanSquaredErrorResultName, "Normalized mean of squared errors of the model on the test partition", new DoubleValue()));
179 Add(new Result(TrainingDirectionalSymmetryResultName, "The average directional symmetry of the forecasts of the model on the training partition", new DoubleValue()));
180 Add(new Result(TestDirectionalSymmetryResultName, "The average directional symmetry of the forecasts of the model on the test partition", new DoubleValue()));
181 Add(new Result(TrainingWeightedDirectionalSymmetryResultName, "The average weighted directional symmetry of the forecasts of the model on the training partition", new DoubleValue()));
182 Add(new Result(TestWeightedDirectionalSymmetryResultName, "The average weighted directional symmetry of the forecasts of the model on the test partition", new DoubleValue()));
183 Add(new Result(TrainingTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the training partition", new DoubleValue()));
184 Add(new Result(TestTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the test partition", new DoubleValue()));
185 Add(new Result(TrainingTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the training partition", new DoubleValue()));
186 Add(new Result(TestTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the test partition", new DoubleValue()));
187 Add(new Result(TrainingTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the training partition", new DoubleValue()));
188 Add(new Result(TestTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the test partition", new DoubleValue()));
192 189 horizon = 1;
193 190 }
194 191
195 [StorableHook(HookType.AfterDeserialization)]
196 private void AfterDeserialization() {
197 if (horizon == 0) horizon = 1;
198 bool anyNewResult = false;
199 if (!ContainsKey(TrainingTheilsUStatisticLastResultName)) {
200 Add(new Result(TrainingTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the training partition", new DoubleArray()));
201 anyNewResult = true;
202 }
203 if (!ContainsKey(TestTheilsUStatisticLastResultName)) {
204 Add(new Result(TestTheilsUStatisticLastResultName, "The average Theil's U statistic (reference: previous value) of the forecasts of the model on the test partition", new DoubleArray()));
205 anyNewResult = true;
206 }
207 if (!ContainsKey(TrainingTheilsUStatisticMeanResultName)) {
208 Add(new Result(TrainingTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the training partition", new DoubleArray()));
209 anyNewResult = true;
210 }
211 if (!ContainsKey(TestTheilsUStatisticMeanResultName)) {
212 Add(new Result(TestTheilsUStatisticMeanResultName, "The average Theil's U statistic (reference: mean value) of the forecasts of the model on the test partition", new DoubleArray()));
213 anyNewResult = true;
214 }
215 if (!ContainsKey(TrainingTheilsUStatisticMaResultName)) {
216 Add(new Result(TrainingTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the training partition", new DoubleArray()));
217 anyNewResult = true;
218 }
219 if (!ContainsKey(TestTheilsUStatisticMaResultName)) {
220 Add(new Result(TestTheilsUStatisticMaResultName, "The average Theil's U statistic (reference: moving average) of the forecasts of the model on the test partition", new DoubleArray()));
221 anyNewResult = true;
222 }
223 if (anyNewResult)
224 RecalculateResults();
225 }
226 192
227 193 protected void CalculateResults() {
228 string[] targetVariables = ProblemData.TargetVariables.ToArray();
229
230 var trainingMseCalculators = new OnlineMeanSquaredErrorCalculator[targetVariables.Length];
231 var testMseCalculators = new OnlineMeanSquaredErrorCalculator[targetVariables.Length];
232 var trainingMaeCalculators = new OnlineMeanAbsoluteErrorCalculator[targetVariables.Length];
233 var testMaeCalculators = new OnlineMeanAbsoluteErrorCalculator[targetVariables.Length];
234 var trainingRSquaredCalculators = new OnlinePearsonsRSquaredCalculator[targetVariables.Length];
235 var testRSquaredCalculators = new OnlinePearsonsRSquaredCalculator[targetVariables.Length];
236 var trainingRelErrorCalculators = new OnlineMeanAbsolutePercentageErrorCalculator[targetVariables.Length];
237 var testRelErrorCalculators = new OnlineMeanAbsolutePercentageErrorCalculator[targetVariables.Length];
238 var trainingNmseCalculators = new OnlineNormalizedMeanSquaredErrorCalculator[targetVariables.Length];
239 var testNmseCalculators = new OnlineNormalizedMeanSquaredErrorCalculator[targetVariables.Length];
240
241 var trainingDsCalculators = new OnlineDirectionalSymmetryCalculator[targetVariables.Length];
242 var testDsCalculators = new OnlineDirectionalSymmetryCalculator[targetVariables.Length];
243 var trainingWdsCalculators = new OnlineWeightedDirectionalSymmetryCalculator[targetVariables.Length];
244 var testWdsCalculators = new OnlineWeightedDirectionalSymmetryCalculator[targetVariables.Length];
245 var trainingTheilsULastCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
246 var testTheilsULastCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
247 var trainingTheilsUMeanCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
248 var testTheilsUMeanCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
249 var trainingTheilsUMovingAverageCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
250 var testTheilsUMovingAverageCalculators = new OnlineTheilsUStatisticCalculator[targetVariables.Length];
251 for (int i = 0; i < targetVariables.Length; i++) {
252 trainingMseCalculators[i] = new OnlineMeanSquaredErrorCalculator();
253 testMseCalculators[i] = new OnlineMeanSquaredErrorCalculator();
254 trainingMaeCalculators[i] = new OnlineMeanAbsoluteErrorCalculator();
255 testMaeCalculators[i] = new OnlineMeanAbsoluteErrorCalculator();
256 trainingRSquaredCalculators[i] = new OnlinePearsonsRSquaredCalculator();
257 testRSquaredCalculators[i] = new OnlinePearsonsRSquaredCalculator();
258 trainingRelErrorCalculators[i] = new OnlineMeanAbsolutePercentageErrorCalculator();
259 testRelErrorCalculators[i] = new OnlineMeanAbsolutePercentageErrorCalculator();
260 trainingNmseCalculators[i] = new OnlineNormalizedMeanSquaredErrorCalculator();
261 testNmseCalculators[i] = new OnlineNormalizedMeanSquaredErrorCalculator();
262
263 trainingDsCalculators[i] = new OnlineDirectionalSymmetryCalculator();
264 testDsCalculators[i] = new OnlineDirectionalSymmetryCalculator();
265 trainingWdsCalculators[i] = new OnlineWeightedDirectionalSymmetryCalculator();
266 testWdsCalculators[i] = new OnlineWeightedDirectionalSymmetryCalculator();
267 trainingTheilsULastCalculators[i] = new OnlineTheilsUStatisticCalculator();
268 testTheilsULastCalculators[i] = new OnlineTheilsUStatisticCalculator();
269 trainingTheilsUMeanCalculators[i] = new OnlineTheilsUStatisticCalculator();
270 testTheilsUMeanCalculators[i] = new OnlineTheilsUStatisticCalculator();
271 trainingTheilsUMovingAverageCalculators[i] = new OnlineTheilsUStatisticCalculator();
272 testTheilsUMovingAverageCalculators[i] = new OnlineTheilsUStatisticCalculator();
273 }
274
275 var allPrognosedTrainingValues = GetPrognosedValues(ProblemData.TrainingIndizes, horizon).GetEnumerator();
276 double[] mean = new double[targetVariables.Length];
277 for (int t = 0; t < targetVariables.Length; t++) {
278 double variance;
279 OnlineCalculatorError meanErrorState, varErrorState;
280 OnlineMeanAndVarianceCalculator.Calculate(ProblemData.Dataset.GetDoubleValues(targetVariables[t], ProblemData.TrainingIndizes), out mean[t], out variance, out meanErrorState, out varErrorState);
281 if (meanErrorState != OnlineCalculatorError.None) mean[t] = 0.0;
282 }
194 string targetVariable = ProblemData.TargetVariable;
195
196 var trainingMseCalculators = new OnlineMeanSquaredErrorCalculator();
197 var testMseCalculators = new OnlineMeanSquaredErrorCalculator();
198 var trainingMaeCalculators = new OnlineMeanAbsoluteErrorCalculator();
199 var testMaeCalculators = new OnlineMeanAbsoluteErrorCalculator();
200 var trainingRSquaredCalculators = new OnlinePearsonsRSquaredCalculator();
201 var testRSquaredCalculators = new OnlinePearsonsRSquaredCalculator();
202 var trainingRelErrorCalculators = new OnlineMeanAbsolutePercentageErrorCalculator();
203 var testRelErrorCalculators = new OnlineMeanAbsolutePercentageErrorCalculator();
204 var trainingNmseCalculators = new OnlineNormalizedMeanSquaredErrorCalculator();
205 var testNmseCalculators = new OnlineNormalizedMeanSquaredErrorCalculator();
206
207 var trainingDsCalculators = new OnlineDirectionalSymmetryCalculator();
208 var testDsCalculators = new OnlineDirectionalSymmetryCalculator();
209 var trainingWdsCalculators = new OnlineWeightedDirectionalSymmetryCalculator();
210 var testWdsCalculators = new OnlineWeightedDirectionalSymmetryCalculator();
211 var trainingTheilsULastCalculators = new OnlineTheilsUStatisticCalculator();
212 var testTheilsULastCalculators = new OnlineTheilsUStatisticCalculator();
213 var trainingTheilsUMeanCalculators = new OnlineTheilsUStatisticCalculator();
214 var testTheilsUMeanCalculators = new OnlineTheilsUStatisticCalculator();
215 var trainingTheilsUMovingAverageCalculators = new OnlineTheilsUStatisticCalculator();
216 var testTheilsUMovingAverageCalculators = new OnlineTheilsUStatisticCalculator();
217
218 double mean = ProblemData.Dataset.GetDoubleValues(targetVariable, ProblemData.TrainingIndizes).Average();
219
283 220 foreach (var row in ProblemData.TrainingIndizes) {
284 221 if (row + horizon < ProblemData.Dataset.Rows) {
285 allPrognosedTrainingValues.MoveNext();
286 var prognosedTrainingValues = allPrognosedTrainingValues.Current.SelectMany(x => x.ToArray()).ToArray();
287 for (int t = 0; t < targetVariables.Length; t++) {
288 var actualContinuation = ProblemData.Dataset.GetDoubleValues(targetVariables[t],
289 Enumerable.Range(row, horizon));
290 int maWindow = 10 * horizon;
291 var movingAverageContinuation = from h in Enumerable.Range(0, horizon)
292 select (from r in Enumerable.Range(row + h - maWindow, maWindow - h)
293 where r > 0
294 select ProblemData.Dataset.GetDoubleValue(targetVariables[t], r)
295 ).Average();
296 double startValue = ProblemData.Dataset.GetDoubleValue(targetVariables[t], row - 1);
297 var prognosedContinuation = prognosedTrainingValues.Skip(t).TakeEvery(targetVariables.Length);
298 trainingDsCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
299 trainingWdsCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
300 trainingTheilsULastCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
301 trainingTheilsUMeanCalculators[t].Add(startValue, actualContinuation.Select(x => mean[t]), actualContinuation, prognosedContinuation);
302 trainingTheilsUMovingAverageCalculators[t].Add(startValue, movingAverageContinuation, actualContinuation, prognosedContinuation);
303
304 var actualContinuationEnumerator = actualContinuation.GetEnumerator();
305 var prognosedContinuationEnumerator = prognosedContinuation.GetEnumerator();
306 while (actualContinuationEnumerator.MoveNext() & prognosedContinuationEnumerator.MoveNext()) {
307 trainingMseCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
308 trainingMaeCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
309 trainingRelErrorCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
310 trainingRSquaredCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
311 trainingNmseCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
312 }
313 if (actualContinuationEnumerator.MoveNext() | prognosedContinuationEnumerator.MoveNext())
314 throw new ArgumentException(
315 "Different number of elements in Actual continuation and prognosed continuation.");
222 var actualContinuation = ProblemData.Dataset.GetDoubleValues(targetVariable, Enumerable.Range(row, horizon)).ToList();
223 var prognosedContinuation = GetPrognosedValues(new List<int> { row }, horizon).First().ToList();
224
225 int maWindow = 10 * horizon;
226 var movingAverageContinuation = from h in Enumerable.Range(0, horizon)
227 select (from r in Enumerable.Range(row + h - maWindow, maWindow - h)
228 where r > 0
229 select ProblemData.Dataset.GetDoubleValue(targetVariable, r)
230 ).Average();
231
232 double startValue = ProblemData.Dataset.GetDoubleValue(targetVariable, row - 1);
233
234 trainingDsCalculators.Add(startValue, actualContinuation, prognosedContinuation);
235 trainingWdsCalculators.Add(startValue, actualContinuation, prognosedContinuation);
236 trainingTheilsULastCalculators.Add(startValue, actualContinuation, prognosedContinuation);
237 trainingTheilsUMeanCalculators.Add(startValue, actualContinuation.Select(x => mean), actualContinuation, prognosedContinuation);
238 trainingTheilsUMovingAverageCalculators.Add(startValue, movingAverageContinuation, actualContinuation, prognosedContinuation);
239
240 var actualContinuationEnumerator = actualContinuation.GetEnumerator();
241 var prognosedContinuationEnumerator = prognosedContinuation.GetEnumerator();
242
243 while (actualContinuationEnumerator.MoveNext() & prognosedContinuationEnumerator.MoveNext()) {
244 trainingMseCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
245 trainingMaeCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
246 trainingRelErrorCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
247 trainingRSquaredCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
248 trainingNmseCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
316 249 }
250 if (actualContinuationEnumerator.MoveNext() | prognosedContinuationEnumerator.MoveNext())
251 throw new ArgumentException("Different number of elements in Actual continuation and prognosed continuation.");
317 252 }
318 253 }
319 var allPrognosedTestValues = GetPrognosedValues(ProblemData.TestIndizes, horizon).ToArray().AsEnumerable().GetEnumerator();
254
255 mean = ProblemData.Dataset.GetDoubleValues(targetVariable, ProblemData.TestIndizes).Average();
320 256 foreach (var row in ProblemData.TestIndizes) {
321 257 if (row + horizon < ProblemData.Dataset.Rows) {
322 allPrognosedTestValues.MoveNext();
323 var prognosedTestValues = allPrognosedTestValues.Current.SelectMany(x => x);
324 for (int t = 0; t < targetVariables.Length; t++) {
325 var actualContinuation = ProblemData.Dataset.GetDoubleValues(targetVariables[t],
326 Enumerable.Range(row, horizon));
327 int maWindow = 10 * horizon;
328 var movingAverageContinuation = from h in Enumerable.Range(0, horizon)
329 select (from r in Enumerable.Range(row + h - maWindow, maWindow - h)
330 where r > 0
331 select ProblemData.Dataset.GetDoubleValue(targetVariables[t], r)
332 ).Average();
333 double startValue = ProblemData.Dataset.GetDoubleValue(targetVariables[t], row - 1);
334 var prognosedContinuation = prognosedTestValues.Skip(t).TakeEvery(targetVariables.Length).ToArray();
335 testDsCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
336 testWdsCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
337 testTheilsULastCalculators[t].Add(startValue, actualContinuation, prognosedContinuation);
338 testTheilsUMeanCalculators[t].Add(startValue, actualContinuation.Select(x => mean[t]), actualContinuation, prognosedContinuation);
339 testTheilsUMovingAverageCalculators[t].Add(startValue, movingAverageContinuation, actualContinuation, prognosedContinuation);
340
341 var actualContinuationEnumerator = actualContinuation.GetEnumerator();
342 var prognosedContinuationEnumerator = prognosedContinuation.AsEnumerable().GetEnumerator();
343 while (actualContinuationEnumerator.MoveNext() & prognosedContinuationEnumerator.MoveNext()) {
344 testMseCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
345 testMaeCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
346 testRelErrorCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
347 testRSquaredCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
348 testNmseCalculators[t].Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
349 }
350 if (actualContinuationEnumerator.MoveNext() | prognosedContinuationEnumerator.MoveNext())
351 throw new ArgumentException(
352 "Different number of elements in Actual continuation and prognosed continuation.");
258 var actualContinuation = ProblemData.Dataset.GetDoubleValues(targetVariable, Enumerable.Range(row, horizon)).ToList();
259 var prognosedContinuation = GetPrognosedValues(new List<int> { row }, horizon).First().ToList();
260
261 int maWindow = 10 * horizon;
262 var movingAverageContinuation = from h in Enumerable.Range(0, horizon)
263 select (from r in Enumerable.Range(row + h - maWindow, maWindow - h)
264 where r > 0
265 select ProblemData.Dataset.GetDoubleValue(targetVariable, r)
266 ).Average();
267
268 double startValue = ProblemData.Dataset.GetDoubleValue(targetVariable, row - 1);
269 testDsCalculators.Add(startValue, actualContinuation, prognosedContinuation);
270 testWdsCalculators.Add(startValue, actualContinuation, prognosedContinuation);
271 testTheilsULastCalculators.Add(startValue, actualContinuation, prognosedContinuation);
272 testTheilsUMeanCalculators.Add(startValue, actualContinuation.Select(x => mean), actualContinuation, prognosedContinuation);
273 testTheilsUMovingAverageCalculators.Add(startValue, movingAverageContinuation, actualContinuation, prognosedContinuation);
274
275 var actualContinuationEnumerator = actualContinuation.GetEnumerator();
276 var prognosedContinuationEnumerator = prognosedContinuation.GetEnumerator();
277 while (actualContinuationEnumerator.MoveNext() & prognosedContinuationEnumerator.MoveNext()) {
278 testMseCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
279 testMaeCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
280 testRelErrorCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
281 testRSquaredCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
282 testNmseCalculators.Add(actualContinuationEnumerator.Current, prognosedContinuationEnumerator.Current);
353 283 }
284 if (actualContinuationEnumerator.MoveNext() | prognosedContinuationEnumerator.MoveNext())
285 throw new ArgumentException("Different number of elements in Actual continuation and prognosed continuation.");
354 286 }
355 287 }
356 288
357
358 TrainingMeanSquaredError = trainingMseCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
359 .ToArray();
360 TestMeanSquaredError = testMseCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
361 .ToArray();
362 TrainingMeanAbsoluteError = trainingMaeCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
363 .ToArray();
364 TestMeanAbsoluteError = testMaeCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
365 .ToArray();
366 TrainingRelativeError = trainingRelErrorCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
367 .ToArray();
368 TestRelativeError = testRelErrorCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
369 .ToArray();
370 TrainingRSquared = trainingRSquaredCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : 0.0)
371 .ToArray();
372 TestRSquared = testRSquaredCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : 0.0)
373 .ToArray();
374 TrainingNormalizedMeanSquaredError = trainingNmseCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
375 .ToArray();
376 TestNormalizedMeanSquaredError = testNmseCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
377 .ToArray();
378
379 TrainingDirectionalSymmetry = trainingDsCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : 0.0)
380 .ToArray();
381 TestDirectionalSymmetry = testDsCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : 0.0)
382 .ToArray();
383 TrainingWeightedDirectionalSymmetry = trainingWdsCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
384 .ToArray();
385 TestWeightedDirectionalSymmetry = testWdsCalculators.Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
386 .ToArray();
387 TrainingTheilsUStatisticLast = trainingTheilsULastCalculators
388 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
389 .ToArray();
390 TestTheilsUStatisticLast = testTheilsULastCalculators
391 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
392 .ToArray();
393 TrainingTheilsUStatisticMean = trainingTheilsUMeanCalculators
394 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
395 .ToArray();
396 TestTheilsUStatisticMean = testTheilsUMeanCalculators
397 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
398 .ToArray();
399 TrainingTheilsUStatisticMovingAverage = trainingTheilsUMovingAverageCalculators
400 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
401 .ToArray();
402 TestTheilsUStatisticMovingAverage = testTheilsUMovingAverageCalculators
403 .Select(c => c.ErrorState == OnlineCalculatorError.None ? c.Value : double.PositiveInfinity)
404 .ToArray();
289 TrainingMeanSquaredError = trainingMseCalculators.ErrorState == OnlineCalculatorError.None ? trainingMseCalculators.Value : double.PositiveInfinity;
290 TestMeanSquaredError = testMseCalculators.ErrorState == OnlineCalculatorError.None ? testMseCalculators.Value : double.PositiveInfinity;
291 TrainingMeanAbsoluteError = trainingMaeCalculators.ErrorState == OnlineCalculatorError.None ? trainingMaeCalculators.Value : double.PositiveInfinity;
292 TestMeanAbsoluteError = testMaeCalculators.ErrorState == OnlineCalculatorError.None ? testMaeCalculators.Value : double.PositiveInfinity;
293 TrainingRelativeError = trainingRelErrorCalculators.ErrorState == OnlineCalculatorError.None ? trainingRelErrorCalculators.Value : double.PositiveInfinity;
294 TestRelativeError = testRelErrorCalculators.ErrorState == OnlineCalculatorError.None ? testRelErrorCalculators.Value : double.PositiveInfinity;
295 TrainingRSquared = trainingRSquaredCalculators.ErrorState == OnlineCalculatorError.None ? trainingRSquaredCalculators.Value : 0.0;
296 TestRSquared = testRSquaredCalculators.ErrorState == OnlineCalculatorError.None ? testRSquaredCalculators.Value : 0.0;
297 TrainingNormalizedMeanSquaredError = trainingNmseCalculators.ErrorState == OnlineCalculatorError.None ? trainingNmseCalculators.Value : double.PositiveInfinity;
298 TestNormalizedMeanSquaredError = testNmseCalculators.ErrorState == OnlineCalculatorError.None ? testNmseCalculators.Value : double.PositiveInfinity;
299
300 TrainingDirectionalSymmetry = trainingDsCalculators.ErrorState == OnlineCalculatorError.None ? trainingDsCalculators.Value : 0.0;
301 TestDirectionalSymmetry = testDsCalculators.ErrorState == OnlineCalculatorError.None ? testDsCalculators.Value : 0.0;
302 TrainingWeightedDirectionalSymmetry = trainingWdsCalculators.ErrorState == OnlineCalculatorError.None ? trainingWdsCalculators.Value : double.PositiveInfinity;
303 TestWeightedDirectionalSymmetry = testWdsCalculators.ErrorState == OnlineCalculatorError.None ? testWdsCalculators.Value : double.PositiveInfinity;
304 TrainingTheilsUStatisticLast = trainingTheilsULastCalculators.ErrorState == OnlineCalculatorError.None ? trainingTheilsULastCalculators.Value : double.PositiveInfinity;
305 TestTheilsUStatisticLast = testTheilsULastCalculators.ErrorState == OnlineCalculatorError.None ? testTheilsULastCalculators.Value : double.PositiveInfinity;
306 TrainingTheilsUStatisticMean = trainingTheilsUMeanCalculators.ErrorState == OnlineCalculatorError.None ? trainingTheilsUMeanCalculators.Value : double.PositiveInfinity;
307 TestTheilsUStatisticMean = testTheilsUMeanCalculators.ErrorState == OnlineCalculatorError.None ? testTheilsUMeanCalculators.Value : double.PositiveInfinity;
308 TrainingTheilsUStatisticMovingAverage = trainingTheilsUMovingAverageCalculators.ErrorState == OnlineCalculatorError.None ? trainingTheilsUMovingAverageCalculators.Value : double.PositiveInfinity;
309 TestTheilsUStatisticMovingAverage = testTheilsUMovingAverageCalculators.ErrorState == OnlineCalculatorError.None ? testTheilsUMovingAverageCalculators.Value : double.PositiveInfinity;
405 310 }
406 311 }
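
The reworked CalculateResults above evaluates a single target variable: for every admissible row it reads the actual continuation of length horizon from the dataset, asks the solution for the prognosed continuation, and feeds the value pairs into scalar online calculators. The following stand-alone sketch mirrors that accumulation pattern with plain arrays; the naive last-value forecaster, the data, and the directional-symmetry variant used here are invented stand-ins, not the HeuristicLab online calculator API.

using System;
using System.Linq;

class ContinuationEvaluationSketch {
    // invented stand-in for the solution's prognosis: repeat the last observed value
    static double[] Prognose(double[] series, int row, int horizon) {
        return Enumerable.Repeat(series[row - 1], horizon).ToArray();
    }

    static void Main() {
        double[] series = { 1.0, 1.2, 1.1, 1.4, 1.3, 1.6, 1.5, 1.8 };
        int horizon = 2;
        double sumSquaredError = 0.0;
        int n = 0, correctDirections = 0, totalDirections = 0;

        // same admissibility test as above: the whole continuation must lie inside the series
        for (int row = 1; row + horizon < series.Length; row++) {
            double[] actual = series.Skip(row).Take(horizon).ToArray();
            double[] prognosed = Prognose(series, row, horizon);
            double startValue = series[row - 1];

            for (int h = 0; h < horizon; h++) {
                double error = actual[h] - prognosed[h];
                sumSquaredError += error * error;
                n++;

                // one common directional-symmetry variant: was the sign of the step predicted correctly?
                double previousActual = (h == 0) ? startValue : actual[h - 1];
                if (Math.Sign(actual[h] - previousActual) == Math.Sign(prognosed[h] - previousActual))
                    correctDirections++;
                totalDirections++;
            }
        }

        Console.WriteLine("MSE = " + (sumSquaredError / n).ToString("F4"));
        Console.WriteLine("Directional symmetry = " + ((double)correctDirections / totalDirections).ToString("P1"));
    }
}
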
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/TimeSeriesPrognosis/ITimeSeriesPrognosisModel.cs
r7100 r7989
23 23 namespace HeuristicLab.Problems.DataAnalysis {
24 24 public interface ITimeSeriesPrognosisModel : IDataAnalysisModel {
25 IEnumerable<IEnumerable<IEnumerable<double>>> GetPrognosedValues(Dataset dataset, IEnumerable<int> rows, int horizon);
25 IEnumerable<IEnumerable<double>> GetPrognosedValues(Dataset dataset, IEnumerable<int> rows, int horizon);
26 26 ITimeSeriesPrognosisSolution CreateTimeSeriesPrognosisSolution(ITimeSeriesPrognosisProblemData problemData);
27 27 }
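
With the per-target nesting removed, GetPrognosedValues now yields exactly one horizon-long sequence per requested row. The snippet below is a hypothetical, self-contained illustration of that result shape; the local forecaster and data are invented, and the HeuristicLab Dataset and model types are not used.

using System;
using System.Collections.Generic;
using System.Linq;

class PrognosedValuesShapeSketch {
    // invented stand-in forecaster: one horizon-long sequence per requested row
    static IEnumerable<IEnumerable<double>> GetPrognosedValues(double[] series, IEnumerable<int> rows, int horizon) {
        foreach (int row in rows)
            yield return Enumerable.Repeat(series[row - 1], horizon); // naive last-value forecast
    }

    static void Main() {
        double[] series = { 2.0, 2.1, 2.3, 2.2, 2.5 };
        foreach (var continuation in GetPrognosedValues(series, new[] { 2, 3 }, horizon: 2))
            Console.WriteLine(string.Join(", ", continuation)); // one line per row, 'horizon' values each
    }
}
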
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/TimeSeriesPrognosis/ITimeSeriesPrognosisProblemData.cs
r7100 r7989
20 20 #endregion
21 21
22 using System.Collections.Generic;
23 22 namespace HeuristicLab.Problems.DataAnalysis {
24 23 public interface ITimeSeriesPrognosisProblemData : IDataAnalysisProblemData {
25 IEnumerable<string> TargetVariables { get; }
24 string TargetVariable { get; }
26 25 }
27 26 }
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/TimeSeriesPrognosis/ITimeSeriesPrognosisSolution.cs
r7160 r7989
27 27 new ITimeSeriesPrognosisProblemData ProblemData { get; set; }
28 28
29 IEnumerable<IEnumerable<double>> PrognosedTrainingValues { get; }
30 IEnumerable<IEnumerable<double>> PrognosedTestValues { get; }
31 IEnumerable<IEnumerable<IEnumerable<double>>> GetPrognosedValues(IEnumerable<int> rows, int horizon);
29 IEnumerable<IEnumerable<double>> GetPrognosedValues(IEnumerable<int> rows, int horizon);
32 30
33 double[] TrainingMeanSquaredError { get; }
34 double[] TestMeanSquaredError { get; }
35 double[] TrainingMeanAbsoluteError { get; }
36 double[] TestMeanAbsoluteError { get; }
37 double[] TrainingRSquared { get; }
38 double[] TestRSquared { get; }
39 double[] TrainingRelativeError { get; }
40 double[] TestRelativeError { get; }
41 double[] TrainingNormalizedMeanSquaredError { get; }
42 double[] TestNormalizedMeanSquaredError { get; }
43 double[] TrainingTheilsUStatisticLast { get; }
44 double[] TestTheilsUStatisticLast { get; }
45 double[] TrainingTheilsUStatisticMean { get; }
46 double[] TestTheilsUStatisticMean { get; }
47 double[] TrainingDirectionalSymmetry { get; }
48 double[] TestDirectionalSymmetry { get; }
49 double[] TrainingWeightedDirectionalSymmetry { get; }
50 double[] TestWeightedDirectionalSymmetry { get; }
31 double TrainingMeanSquaredError { get; }
32 double TestMeanSquaredError { get; }
33 double TrainingMeanAbsoluteError { get; }
34 double TestMeanAbsoluteError { get; }
35 double TrainingRSquared { get; }
36 double TestRSquared { get; }
37 double TrainingRelativeError { get; }
38 double TestRelativeError { get; }
39 double TrainingNormalizedMeanSquaredError { get; }
40 double TestNormalizedMeanSquaredError { get; }
41 double TrainingTheilsUStatisticLast { get; }
42 double TestTheilsUStatisticLast { get; }
43 double TrainingTheilsUStatisticMean { get; }
44 double TestTheilsUStatisticMean { get; }
45 double TrainingDirectionalSymmetry { get; }
46 double TestDirectionalSymmetry { get; }
47 double TrainingWeightedDirectionalSymmetry { get; }
48 double TestWeightedDirectionalSymmetry { get; }
51 49 }
52 50 }
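
Because the solution now covers a single target variable, every error measure collapses from a double[] (one entry per target) to a plain double. Among them are three Theil's U statistics that differ only in the reference forecast (previous value, training mean, moving average). As a reminder of what the statistic expresses, here is a minimal sketch of the common definition with a last-value reference; the exact formula inside OnlineTheilsUStatisticCalculator may differ in detail, and the data is invented.

using System;
using System.Linq;

class TheilsUSketch {
    // U = sqrt( sum (actual - predicted)^2 / sum (actual - reference)^2 ),
    // here with the "previous value" reference: the last observation repeated over the horizon.
    // Values below 1 mean the model beats the naive reference forecast.
    static double TheilsULast(double startValue, double[] actual, double[] predicted) {
        double modelError = actual.Zip(predicted, (a, p) => (a - p) * (a - p)).Sum();
        double referenceError = actual.Sum(a => (a - startValue) * (a - startValue));
        return Math.Sqrt(modelError / referenceError);
    }

    static void Main() {
        double[] actual = { 1.1, 1.3, 1.2 };
        double[] predicted = { 1.05, 1.25, 1.22 };
        Console.WriteLine(TheilsULast(1.0, actual, predicted).ToString("F3"));
    }
}
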
-
branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/TimeSeries/TimeSeriesPrognosisInstanceProvider.cs
r7890 r7989
40 40 IEnumerable<string> allowedInputVars = csvFileParser.VariableNames.Where(x => !x.Equals(targetVar));
41 41
42 ITimeSeriesPrognosisProblemData regData = new TimeSeriesPrognosisProblemData(dataset, allowedInputVars, new List<string> { targetVar });
42 ITimeSeriesPrognosisProblemData regData = new TimeSeriesPrognosisProblemData(dataset, allowedInputVars, targetVar);
43 43
44 44 int trainingPartEnd = csvFileParser.Rows * 2 / 3;
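
The instance provider now hands a single target-variable name to TimeSeriesPrognosisProblemData instead of a one-element list; the surrounding preparation stays the same. Below is a stand-alone sketch of those steps with invented variable names and row counts; the HeuristicLab CSV parser and problem-data types are not used here.

using System;
using System.Linq;

class InstancePreparationSketch {
    static void Main() {
        string[] variableNames = { "t", "x1", "x2", "demand" };   // invented CSV columns
        string targetVar = "demand";
        int rows = 300;                                           // invented row count

        // every column except the target is an allowed input variable
        var allowedInputVars = variableNames.Where(x => !x.Equals(targetVar)).ToList();

        // first two thirds of the rows form the training partition, the rest is test
        int trainingPartEnd = rows * 2 / 3;

        Console.WriteLine("inputs: " + string.Join(", ", allowedInputVars));
        Console.WriteLine("training rows: 0.." + (trainingPartEnd - 1));
        Console.WriteLine("test rows: " + trainingPartEnd + ".." + (rows - 1));
    }
}
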