- Timestamp: 05/15/19 12:15:53
- File: 1 edited
Legend:
- Unmodified (context lines, prefixed with a space)
- Added (prefixed with "+")
- Removed (prefixed with "-")
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
--- Problem.cs (r16893)
+++ Problem.cs (r16951)
@@ -604,10 +604,16 @@
   results.Add(new Result("SNMSE", typeof(DoubleValue)));
 }
+if (!results.ContainsKey("SNMSE values")) {
+  var dt = new DataTable("SNMSE values");
+  dt.Rows.Add(new DataRow("ODE SNMSE"));
+  dt.Rows.Add(new DataRow("Fitness"));
+  results.Add(new Result("SNMSE values", dt));
+}
 if (!results.ContainsKey("Solution")) {
   results.Add(new Result("Solution", typeof(Solution)));
 }
-if (!results.ContainsKey("Squared error and gradient")) {
-  results.Add(new Result("Squared error and gradient", typeof(DataTable)));
-}
+// if (!results.ContainsKey("Squared error and gradient")) {
+//   results.Add(new Result("Squared error and gradient", typeof(DataTable)));
+// }
 
 var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
@@ -679,9 +685,10 @@
 // for target values and latent variables
 var trainingRows = optimizationData.rows;
+double trainingSNMSE = 0.0;
 for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
   // is target variable
   if (colIdx < targetVars.Length) {
     var targetVar = targetVars[colIdx];
-    var trainingDataTable = new DataTable(targetVar + " prediction (training)");
+    var trainingDataTable = new DataTable(targetVar + " prediction (training)");
     var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
     var idx = Enumerable.Range(0, trainingRows.Length).Select(i => i * targetVars.Length + colIdx);
@@ -690,4 +697,7 @@
     trainingDataTable.Rows.Add(actualValuesRow);
     trainingDataTable.Rows.Add(predictedValuesRow);
+
+    // again calculate the integrated error (regardless how fitness is determined)
+    trainingSNMSE += actualValuesRow.Values.Zip(predictedValuesRow.Values, (a, p) => Math.Pow(a - p, 2)).Average() / actualValuesRow.Values.Variance() / targetVars.Length;
 
     for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
@@ -710,16 +720,21 @@
   }
 }
-var errorTable = new DataTable("Squared error and gradient");
-var seRow = new DataRow("Squared error");
-var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
-errorTable.Rows.Add(seRow);
-foreach (var gRow in gradientRows) {
-  gRow.VisualProperties.SecondYAxis = true;
-  errorTable.Rows.Add(gRow);
-}
-var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
-int r = 0;
-
-// foreach (var y_pred in trainingPrediction) {
+results.AddOrUpdateResult("ODE SNMSE", new DoubleValue(trainingSNMSE));
+var odeSNMSETable = (DataTable)results["SNMSE values"].Value;
+odeSNMSETable.Rows["ODE SNMSE"].Values.Add(trainingSNMSE);
+odeSNMSETable.Rows["Fitness"].Values.Add(bestIndividualAndQuality.Item2);
+
+// var errorTable = new DataTable("Squared error and gradient");
+// var seRow = new DataRow("Squared error");
+// var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
+// errorTable.Rows.Add(seRow);
+// foreach (var gRow in gradientRows) {
+//   gRow.VisualProperties.SecondYAxis = true;
+//   errorTable.Rows.Add(gRow);
+// }
+// var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
+// int r = 0;
+
+// foreach (var y_pred in fi) {
 //   // calculate objective function gradient
 //   double f_i = 0.0;
@@ -1599,13 +1614,4 @@
 #region helper
 
-private static IEnumerable<T> EveryNth<T>(IEnumerable<T> xs, int step) {
-  var e = xs.GetEnumerator();
-  while (e.MoveNext()) {
-    for (int i = 0; i < step; i++) {
-      if (!e.MoveNext()) yield break;
-    }
-    yield return e.Current;
-  }
-}
 
 private void InitAllParameters() {
@@ -1630,5 +1636,4 @@
 
 private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
-  // return n.Symbol.Name[0] == 'θ';
   return n is ConstantTreeNode;
 }
@@ -1736,6 +1741,4 @@
 
 grammar.ConfigureAsDefaultRegressionGrammar();
-grammar.GetSymbol("Logarithm").Enabled = false; // not supported yet
-grammar.GetSymbol("Exponential").Enabled = false; // not supported yet
 
 // configure initialization of constants
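The substantive change in r16951 is the integrated-error bookkeeping: a trainingSNMSE accumulator sums, for every target variable, the mean squared error of the integrated ODE prediction normalized by the variance of the actual values and by the number of targets, i.e. SNMSE = (1/k) * sum_j [ MSE(y_j, yhat_j) / Var(y_j) ]. The following self-contained sketch reproduces just that computation outside HeuristicLab; the names (SnmseSketch, Snmse, Variance) are illustrative stand-ins, not the HeuristicLab API.

using System;
using System.Linq;

// Sketch of the "ODE SNMSE" accumulation added in this changeset (new lines
// 687, 700-701): per target variable, MSE of the prediction divided by the
// variance of the actual values, averaged over all k targets.
public static class SnmseSketch {
  // Population variance; stands in for HeuristicLab's Values.Variance().
  static double Variance(double[] xs) {
    double mean = xs.Average();
    return xs.Select(x => (x - mean) * (x - mean)).Average();
  }

  // actual[j] and predicted[j] hold the training series of target j
  // (assumed to have equal lengths, as in the changeset).
  public static double Snmse(double[][] actual, double[][] predicted) {
    int k = actual.Length; // number of target variables
    double snmse = 0.0;
    for (int j = 0; j < k; j++) {
      // same shape as the committed line: Zip -> squared error -> Average,
      // then normalize by the variance of the actual values and by k
      double mse = actual[j].Zip(predicted[j], (a, p) => Math.Pow(a - p, 2)).Average();
      snmse += mse / Variance(actual[j]) / k;
    }
    return snmse;
  }

  public static void Main() {
    var actual    = new[] { new[] { 1.0, 2.0, 3.0 }, new[] { 2.0, 4.0, 8.0 } };
    var predicted = new[] { new[] { 1.1, 1.9, 3.2 }, new[] { 2.5, 3.5, 7.5 } };
    Console.WriteLine(Snmse(actual, predicted));
  }
}

A value of 0 means a perfect fit, and a value near 1 means the model does no better than predicting each target's mean; the variance normalization is what makes the measure comparable across targets with different scales, which is presumably why it is logged alongside the fitness in the new "SNMSE values" table.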