Changeset 16215 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
Timestamp: 10/08/18 08:17:03
File: 1 edited
Legend: lines added in r16215 are marked with a leading "+"; all other lines are unmodified context (lines that differ only in whitespace are shown once, as context). Hunks are separated by "…".
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r16214 → r16215

  public static Vector operator +(Vector a, Vector b) {
    if (a == Zero) return b;
    if (b == Zero) return a;
    Debug.Assert(a.arr.Length == b.arr.Length);
    var res = new double[a.arr.Length];
    for (int i = 0; i < res.Length; i++)
      res[i] = a.arr[i] + b.arr[i];
    return new Vector(res);
  }
  public static Vector operator -(Vector a, Vector b) {
    if (b == Zero) return a;
    if (a == Zero) return -b;
    Debug.Assert(a.arr.Length == b.arr.Length);
    var res = new double[a.arr.Length];
    for (int i = 0; i < res.Length; i++)
      res[i] = a.arr[i] - b.arr[i];
    return new Vector(res);
  }
  public static Vector operator -(Vector v) {
    if (v == Zero) return Zero;
    for (int i = 0; i < v.arr.Length; i++)
      v.arr[i] = -v.arr[i];
    return v;
…
  public static Vector operator *(double s, Vector v) {
    if (v == Zero) return Zero;
    if (s == 0.0) return Zero;
    var res = new double[v.arr.Length];
    for (int i = 0; i < res.Length; i++)
      res[i] = s * v.arr[i];
    return new Vector(res);
  }

  public static Vector operator *(Vector v, double s) {
    return s * v;
  }
+ public static Vector operator *(Vector u, Vector v) {
+   if (v == Zero) return Zero;
+   if (u == Zero) return Zero;
+   var res = new double[v.arr.Length];
+   for (int i = 0; i < res.Length; i++)
+     res[i] = u.arr[i] * v.arr[i];
+   return new Vector(res);
+ }
  public static Vector operator /(double s, Vector v) {
    if (s == 0.0) return Zero;
    if (v == Zero) throw new ArgumentException("Division by zero vector");
    var res = new double[v.arr.Length];
    for (int i = 0; i < res.Length; i++)
      res[i] = 1.0 / v.arr[i];
    return new Vector(res);
…
  }

+ public static Vector Sin(Vector s) {
+   var res = new double[s.arr.Length];
+   for (int i = 0; i < res.Length; i++) res[i] = Math.Sin(s.arr[i]);
+   return new Vector(res);
+ }
+ public static Vector Cos(Vector s) {
+   var res = new double[s.arr.Length];
+   for (int i = 0; i < res.Length; i++) res[i] = Math.Cos(s.arr[i]);
+   return new Vector(res);
+ }

  private readonly double[] arr; // backing array;
…
  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() {
    if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
    }
…
    // TODO: do not clear selection of target variables when the input variables are changed (keep selected target variables)
    // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
+   // TODO: use training range as default training episode

  }
…
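The additions in this first group (the element-wise Vector * Vector overload and the Vector.Sin/Vector.Cos helpers) are the pieces the tree interpreter further down uses when it propagates gradients through the new sin/cos symbols. A minimal usage sketch follows; it assumes the Vector type from the hunk above (with an accessible constructor taking a double[]), and the concrete numbers are illustrative only:

  // Illustrative only; Vector is the type defined in the hunk above.
  var a = new Vector(new double[] { 0.0, Math.PI / 2 });
  var b = new Vector(new double[] { 2.0, 3.0 });

  var prod = a * b;          // element-wise product: { 0.0, 3 * Math.PI / 2 }
  var sins = Vector.Sin(a);  // Math.Sin applied per element: { 0.0, 1.0 }
  var coss = Vector.Cos(a);  // Math.Cos applied per element: { 1.0, ~0.0 }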
  var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

  if (OptimizeParametersForEpisodes) {
    int eIdx = 0;
    double totalNMSE = 0.0;
    int totalSize = 0;
    foreach (var episode in TrainingEpisodes) {
      double[] optTheta;
      double nmse;
…
  // collect values of all target variables
  var colIdx = 0;
  foreach (var targetVar in targetVars) {
    int rowIdx = 0;
    foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
      targetValues[rowIdx, colIdx] = value;
      rowIdx++;
…
  var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();

  foreach (var tree in trees) {
    foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
      nodeIdx.Add(node, nodeIdx.Count);
    }
…
  optTheta = new double[0];
  if (theta.Length > 0) {
    alglib.minlbfgsstate state;
    alglib.minlbfgsreport report;
…
   * NFEV countains number of function calculations
   */
  if (report.terminationtype < 0) { nmse = 10E6; return; }
…
  EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
    new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables });
  if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
…
  var g = Vector.Zero;
  int r = 0;
  foreach (var y_pred in predicted) {
    for (int c = 0; c < y_pred.Length; c++) {

      var y_pred_f = y_pred[c].Item1;
…
  base.Analyze(individuals, qualities, results, random);

  if (!results.ContainsKey("Prediction (training)")) {
    results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
  }
  if (!results.ContainsKey("Prediction (test)")) {
    results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
  }
  if (!results.ContainsKey("Models")) {
    results.Add(new Result("Models", typeof(VariableCollection)));
  }
…
  // TODO extract common functionality from Evaluate and Analyze
  var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
  foreach (var tree in trees) {
    foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
      nodeIdx.Add(node, nodeIdx.Count);
    }
…
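The Evaluate hunks above elide the part that actually configures and runs ALGLIB's L-BFGS optimizer on the θ vector; only the state/report declarations, the EvaluateObjectiveAndGradient callback (whose signature matches ALGLIB's gradient delegate) and the termination check are visible in this diff. The nodeIdx dictionary gives each θ node across all trees its column in the gradient vector. For orientation, a rough sketch of how that ALGLIB API is usually driven is shown below; the number of corrections, stopping criteria and iteration limit are assumptions, not values taken from this changeset:

  // Sketch only: standard alglib.minlbfgs driving pattern, not the changeset's exact code.
  alglib.minlbfgsstate state;
  alglib.minlbfgsreport report;
  alglib.minlbfgscreate(Math.Min(theta.Length, 10), theta, out state);   // up to 10 corrections (assumed)
  alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, 100);                     // at most 100 iterations (assumed)
  alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
    new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables });
  alglib.minlbfgsresults(state, out optTheta, out report);
  if (report.terminationtype < 0) { nmse = 10E6; return; }               // optimization failed -> return large error value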
  var trainingList = new ItemList<DataTable>();

  if (OptimizeParametersForEpisodes) {
    var eIdx = 0;
    var trainingPredictions = new List<Tuple<double, Vector>[][]>();
    foreach (var episode in TrainingEpisodes) {
      var episodes = new[] { episode };
      var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see evaluate
…
  // only for actual target values
  var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
  for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
    var targetVar = targetVars[colIdx];
    var trainingDataTable = new DataTable(targetVar + " prediction (training)");
…
  var models = new VariableCollection();

  foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
    var targetVarName = tup.Item1;
    var tree = tup.Item2;
…
  // only for actual target values
  var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
  for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
    var targetVar = targetVars[colIdx];
    var trainingDataTable = new DataTable(targetVar + " prediction (training)");
…
    NumericIntegrationSteps).ToArray();

  for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
    var targetVar = targetVars[colIdx];
    var testDataTable = new DataTable(targetVar + " prediction (test)");
…
  var models = new VariableCollection(); // to store target var names and original version of tree

  foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
    var targetVarName = tup.Item1;
    var tree = tup.Item2;
…
  private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
    ISymbolicExpressionTreeNode translatedNode = null;
    if (n.Symbol is StartSymbol) {
      translatedNode = new StartSymbol().CreateTreeNode();
    } else if (n.Symbol is ProgramRootSymbol) {
      translatedNode = new ProgramRootSymbol().CreateTreeNode();
    } else if (n.Symbol.Name == "+") {
      translatedNode = new Addition().CreateTreeNode();
    } else if (n.Symbol.Name == "-") {
      translatedNode = new Subtraction().CreateTreeNode();
    } else if (n.Symbol.Name == "*") {
      translatedNode = new Multiplication().CreateTreeNode();
    } else if (n.Symbol.Name == "%") {
      translatedNode = new Division().CreateTreeNode();
+   } else if (n.Symbol.Name == "sin") {
+     translatedNode = new Sine().CreateTreeNode();
+   } else if (n.Symbol.Name == "cos") {
+     translatedNode = new Cosine().CreateTreeNode();
    } else if (IsConstantNode(n)) {
      var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
      constNode.Value = parameterValues[nextParIdx];
…
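TranslateTreeNode rebuilds the internal tree (whose function nodes are identified only by symbol name) as a regular HeuristicLab symbolic expression with proper symbol objects, now including Sine and Cosine, and substitutes each θ placeholder with a Constant carrying the next optimized value via nextParIdx. A small worked example; the tree shape, the parameter values and the genotype variable are hypothetical, used only for illustration:

  // Hypothetical example: genotype encodes  sin(θ0 * x) + θ1  and the optimizer returned { 1.7, -0.3 }.
  // Walking the tree depth-first, each θ node consumes the next entry of parameterValues:
  //   first θ node  -> Constant(1.7)   (nextParIdx: 0 -> 1)
  //   second θ node -> Constant(-0.3)  (nextParIdx: 1 -> 2)
  // so the readable model becomes  sin(1.7 * x) + (-0.3).
  int nextParIdx = 0;
  var phenotype = TranslateTreeNode(genotype.Root, new double[] { 1.7, -0.3 }, ref nextParIdx);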
      translatedNode = varNode;
    }
    foreach (var child in n.Subtrees) {
      translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
    }
…
  double h = 1.0 / NUM_STEPS;

  foreach (var episode in episodes) {
    var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);
    // return first value as stored in the dataset
…
  var variableValues = new Dictionary<string, Tuple<double, Vector>>();
  var t0 = rows.First();
  foreach (var varName in inputVariables) {
    variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
  }
  foreach (var varName in targetVariables) {
    variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
  }
  // add value entries for latent variables which are also integrated
  foreach (var latentVar in latentVariables) {
    variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as starting value
  }
  var calculatedVariables = targetVariables.Concat(latentVariables); // TODO: must conincide with the order of trees in the encoding

  foreach (var t in rows.Skip(1)) {
    for (int step = 0; step < NUM_STEPS; step++) {
      var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
      foreach (var tup in trees.Zip(calculatedVariables, Tuple.Create)) {
        var tree = tup.Item1;
        var targetVarName = tup.Item2;
…
      // update variableValues for next step
      foreach (var kvp in deltaValues) {
        var oldVal = variableValues[kvp.Key];
        variableValues[kvp.Key] = Tuple.Create(
…
    // update for next time step
    foreach (var varName in inputVariables) {
      variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
    }
…
  ) {

    switch (node.Symbol.Name) {
      case "+": {
          var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues); // TODO capture all parameters into a state type for interpretation
…
          // protected division
          if (r.Item1.IsAlmost(0.0)) {
            return Tuple.Create(0.0, Vector.Zero);
          } else {
…
        }
      }
+     case "sin": {
+         var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
+         return Tuple.Create(
+           Math.Sin(x.Item1),
+           Vector.Cos(x.Item2) * x.Item2
+         );
+       }
+     case "cos": {
+         var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
+         return Tuple.Create(
+           Math.Cos(x.Item1),
+           -Vector.Sin(x.Item2) * x.Item2
+         );
+       }
      default: {
          // distinguish other cases
          if (IsConstantNode(node)) {
            var vArr = new double[parameterValues.Length]; // backing array for vector
            vArr[nodeIdx[node]] = 1.0;
…
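For reference, the forward-mode rule these two new interpreter cases stand for is the chain rule: for a subtree with value u (Item1) and gradient vector u' (Item2), d sin(u)/dθ = cos(u) · u' and d cos(u)/dθ = -sin(u) · u'. In that textbook form the trigonometric factor is taken of the scalar value (Item1), whereas the committed hunk applies Vector.Cos/Vector.Sin to the gradient vector (Item2). A hedged sketch of the textbook variant, using the existing double-times-Vector overload, is given below for comparison only:

  // Sketch only: standard forward-mode chain rule, for comparison with the hunk above.
  case "sin": {
      var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
      return Tuple.Create(Math.Sin(x.Item1), Math.Cos(x.Item1) * x.Item2);   // cos(u) * u'
    }
  case "cos": {
      var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
      return Tuple.Create(Math.Cos(x.Item1), -Math.Sin(x.Item1) * x.Item2);  // -sin(u) * u'
    }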
  private void RegisterEventHandlers() {
    ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
    if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;

    TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
    if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;

    FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
    if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;

    MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
…
  UpdateTargetVariables(); // implicitly updates other dependent parameters
  var handler = ProblemDataChanged;
  if (handler != null) handler(this, EventArgs.Empty);
…
  l.Add(new StringValue("%").AsReadOnly());
  l.Add(new StringValue("-").AsReadOnly());
+ l.Add(new StringValue("sin").AsReadOnly());
+ l.Add(new StringValue("cos").AsReadOnly());
  return l.AsReadOnly();
…
  var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
  var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
  foreach (var matchingItem in matchingItems) {
    newVariablesList.SetItemCheckedState(matchingItem, true);
  }
…
  var encoding = new MultiEncoding();
  var g = CreateGrammar();
  foreach (var targetVar in TargetVariables.CheckedItems) {
    encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
  }
  for (int i = 1; i <= NumberOfLatentVariables; i++) {
    encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
  }
…
  //}, 1, 1);

  foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
    g.AddTerminalSymbol(variableName);

  // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
  var numericConstantsFactor = 2.0;
  for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
    g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
  }

  // generate symbols for latent variables
  for (int i = 1; i <= NumberOfLatentVariables; i++) {
    g.AddTerminalSymbol("λ" + i); // numeric parameter for which the value is optimized using AutoDiff
  }
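Adding "sin" and "cos" to the default function set only makes them selectable; for generated trees to actually contain them, they also have to be registered as unary symbols when CreateGrammar builds the grammar (those lines fall outside the hunks shown here). A hedged sketch of what such a registration typically looks like; the grammar type and the AddSymbols arity arguments are assumptions, not taken from this diff:

  // Sketch only: assumed registration of the checked function names inside CreateGrammar().
  var g = new SimpleSymbolicExpressionGrammar();
  g.AddSymbols(new[] { "+", "-", "*", "%" }, 2, 2);   // binary functions ('%' = protected division)
  g.AddSymbols(new[] { "sin", "cos" }, 1, 1);         // the newly added unary functions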