Changeset 16954 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling
- Timestamp: 05/15/19 12:45:38
- Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files: 25 added, 3 edited
Legend: in the diffs below, lines prefixed with "+" were added in r16954, lines prefixed with "-" were removed, and unprefixed lines are unchanged context.
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/HeuristicLab.Problems.DynamicalSystemsModelling-3.3.csproj
--- r16664
+++ r16954

@@ -114,11 +114,14 @@
     </Reference>
     <Reference Include="System.Drawing" />
+    <Reference Include="System.IO.Compression" />
     <Reference Include="System.Windows.Forms" />
   </ItemGroup>
   <ItemGroup>
+    <Compile Include="Instances\DataDescriptor.cs" />
     <Compile Include="OdeParameterIdentification.cs" />
     <Compile Include="Plugin.cs" />
     <Compile Include="Problem.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="ProblemInstanceProvider.cs" />
     <Compile Include="Solution.cs" />
     <Compile Include="SolutionView.cs">

@@ -132,4 +135,26 @@
   <ItemGroup>
     <None Include="HeuristicLab.snk" />
+    <None Include="Instances\bacterial_1.csv" />
+    <EmbeddedResource Include="Instances\bacterial_1.csv.zip" />
+    <None Include="Instances\bar_magnets_1.csv" />
+    <EmbeddedResource Include="Instances\bar_magnets_1.csv.zip" />
+    <None Include="Instances\ChemicalReaction.csv" />
+    <EmbeddedResource Include="Instances\ChemicalReaction.csv.zip" />
+    <None Include="Instances\E-CELL.csv" />
+    <EmbeddedResource Include="Instances\E-CELL.csv.zip" />
+    <None Include="Instances\Glider_1.csv" />
+    <EmbeddedResource Include="Instances\Glider_1.csv.zip" />
+    <None Include="Instances\LotkaVolterra.csv" />
+    <EmbeddedResource Include="Instances\LotkaVolterra.csv.zip" />
+    <None Include="Instances\predator_prey_1.csv" />
+    <EmbeddedResource Include="Instances\predator_prey_1.csv.zip" />
+    <None Include="Instances\S-System.csv" />
+    <EmbeddedResource Include="Instances\S-System.csv.zip" />
+    <None Include="Instances\shear_flow_1.csv" />
+    <EmbeddedResource Include="Instances\shear_flow_1.csv.zip" />
+    <None Include="Instances\ThreeLotkaVolterra.csv" />
+    <EmbeddedResource Include="Instances\ThreeLotkaVolterra.csv.zip" />
+    <None Include="Instances\van_der_pol_1.csv" />
+    <EmbeddedResource Include="Instances\van_der_pol_1.csv.zip" />
     <None Include="packages.config" />
     <None Include="Plugin.cs.frame" />

@@ -232,4 +257,8 @@
       <Name>HeuristicLab.Problems.DataAnalysis-3.4</Name>
       <Private>False</Private>
+    </ProjectReference>
+    <ProjectReference Include="..\..\HeuristicLab.Problems.Instances.DataAnalysis\3.3\HeuristicLab.Problems.Instances.DataAnalysis-3.3.csproj">
+      <Project>{94C7714E-29D4-4D6D-B213-2C18D627AB75}</Project>
+      <Name>HeuristicLab.Problems.Instances.DataAnalysis-3.3</Name>
     </ProjectReference>
     <ProjectReference Include="..\..\HeuristicLab.Problems.Instances\3.3\HeuristicLab.Problems.Instances-3.3.csproj">
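The added EmbeddedResource entries and the new System.IO.Compression reference suggest that the new ProblemInstanceProvider serves the problem instances from the zipped CSV files that are now compiled into the assembly. As a rough illustration of that mechanism (not taken from the changeset: the resource-name convention, the helper class, and the single-entry archive layout are assumptions), an embedded .csv.zip can be opened like this:

    using System;
    using System.IO;
    using System.IO.Compression;
    using System.Reflection;

    // Minimal sketch, assuming the .csv.zip files are embedded under an
    // "Instances" folder and each archive contains a single CSV entry.
    // The class and resource-name prefix are hypothetical illustrations.
    public static class EmbeddedInstanceReader {
      public static string[] ReadCsvLines(string instanceName) {
        var asm = Assembly.GetExecutingAssembly();
        // Embedded resource names follow "<default namespace>.<folder>.<file>".
        var resourceName = asm.GetName().Name + ".Instances." + instanceName + ".csv.zip";
        using (var stream = asm.GetManifestResourceStream(resourceName)) {
          if (stream == null) throw new FileNotFoundException(resourceName);
          using (var zip = new ZipArchive(stream, ZipArchiveMode.Read))
          using (var reader = new StreamReader(zip.Entries[0].Open())) {
            return reader.ReadToEnd()
              .Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
          }
        }
      }
    }

HeuristicLab's actual instance providers may resolve resource names differently; the sketch only shows the System.IO.Compression side of reading such a resource, e.g. EmbeddedInstanceReader.ReadCsvLines("LotkaVolterra").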
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Plugin.cs.frame
--- r16952
+++ r16954

@@ -52,4 +52,5 @@
   [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression", "3.4")]
   [PluginDependency("HeuristicLab.Problems.Instances", "3.3")]
+  [PluginDependency("HeuristicLab.Problems.Instances.DataAnalysis", "3.3")]
   [PluginDependency("HeuristicLab.Random", "3.3")]
   public class Plugin : PluginBase {
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
--- r16951
+++ r16954

@@ -33,5 +33,4 @@
 using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
-using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
 using HeuristicLab.Problems.DataAnalysis;
 using HeuristicLab.Problems.DataAnalysis.Symbolic;

@@ -45,5 +44,5 @@
 [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
 [StorableType("065C6A61-773A-42C9-9DE5-61A5D1D823EB")]
-public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
+public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<Problem> {
   #region parameter names
   private const string ProblemDataParameterName = "Data";

@@ -103,3 +102,6 @@
     get { return (IFixedValueParameter<DoubleValue>)Parameters["ODE NMSE weight"]; }
   }
+  public IFixedValueParameter<DoubleValue> NumericDifferencesSmoothingParameter {
+    get { return (IFixedValueParameter<DoubleValue>)Parameters["Numeric differences smoothing"]; }
+  }
   #endregion

@@ -135,10 +137,14 @@
     get { return NumericIntegrationStepsParameter.Value.Value; }
   }
-  public IEnumerable<IntRange> TrainingEpisodes {
+  public IList<IntRange> TrainingEpisodes {
     get { return TrainingEpisodesParameter.Value; }
   }
   public bool OptimizeParametersForEpisodes {
     get { return OptimizeParametersForEpisodesParameter.Value.Value; }
   }
+  public double NumericDifferencesSmoothing {
+    get { return NumericDifferencesSmoothingParameter.Value.Value; }
+  }
+
 
   public string OdeSolver {

@@ -212,4 +218,5 @@
   Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
   Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+  Parameters.Add(new FixedValueParameter<DoubleValue>("Numeric differences smoothing", "Determines the amount of smoothing for the numeric differences which are calculated for pre-tuning. Values from -8 to 8 are reasonable. Use very low value if the data contains no noise. Default: 2.", new DoubleValue(2.0)));

   var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */ };

@@ -248,7 +255,12 @@
     return totalNMSE / totalSize;
   } else {
-    // double[] optTheta;
-    double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
-      PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value);
+    // when no training episodes are specified then we implicitly use the training partition from the problemData
+    var trainingEpisodes = TrainingEpisodes;
+    if (!trainingEpisodes.Any()) {
+      trainingEpisodes = new List<IntRange>();
+      trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
+    }
+    double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, trainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
+      PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value, NumericDifferencesSmoothing);
     // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
     return nmse;

@@ -268,4 +280,5 @@
   int maxOdeParameterOptIterations,
   double pretuningErrorWeight = 0.5,
-  double odeErrorWeight = 0.5
+  double odeErrorWeight = 0.5,
+  double numericDifferencesSmoothing = 2
   ) {

@@ -280,4 +293,5 @@

   // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
-  double nmse = pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxPretuningParameterOptIterations,
+  double nmse = pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes,
+    maxPretuningParameterOptIterations, numericDifferencesSmoothing,
     initialTheta, out double[] pretunedParameters);

@@ -316,4 +330,5 @@
   IEnumerable<IntRange> episodes,
   int maxParameterOptIterations,
+  double numericDifferencesSmoothing, // for smoothing of numeric differences
   double[][] initialTheta,
   out double[] optTheta) {

@@ -326,5 +341,5 @@ (whitespace-only change to the if line)

   // first calculate values of latent variables by integration
-  if (latentVariables.Length > 0) {
+  if (latentVariables.Length > 0) {
     var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
     var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");

@@ -360,7 +375,7 @@
   var episodeRows = Enumerable.Range(ep.Start, ep.Size);
   var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
-  targetValuesDiff.AddRange(targetValues.Skip(1).Zip(targetValues, (t1, t0) => t1 - t0)); // TODO: smoothing or multi-pole
-  }
-  var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End - 1)); // because we lose the last row in the differencing step
+  targetValuesDiff.AddRange(CalculateDifferences(targetValues, numericDifferencesSmoothing));
+  }
+  var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End));

   // data for input variables is assumed to be known

@@ -417,4 +432,5 @@


+
 // similar to above but this time we integrate and optimize all parameters for all targets concurrently
 private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,

@@ -617,4 +633,11 @@
   // }

+  // when no training episodes are specified then we implicitly use the training partition from the problemData
+  var trainingEpisodes = TrainingEpisodes;
+  if (!trainingEpisodes.Any()) {
+    trainingEpisodes = new List<IntRange>();
+    trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
+  }
+
   var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
   var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

@@ -674,5 +697,5 @@
   .Distinct();

-  var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
+  var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, trainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
   var numParams = optimizationData.nodeValueLookup.ParameterCount;

@@ -755,35 +778,39 @@
   // results["Squared error and gradient"].Value = errorTable;

-  // TODO: DRY for training and test
-  var testList = new ItemList<DataTable>();
-  var testRows = ProblemData.TestIndices.ToArray();
-  var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
-  var testPrediction = Integrate(testOptimizationData).ToArray();
-
-  for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
-    // is target variable
-    if (colIdx < targetVars.Length) {
-      var targetVar = targetVars[colIdx];
-      var testDataTable = new DataTable(targetVar + " prediction (test)");
-      var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
-      var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
-      testDataTable.Rows.Add(actualValuesRow);
-      testDataTable.Rows.Add(predictedValuesRow);
-      testList.Add(testDataTable);
-
-    } else {
-      // var latentVar = latentVariables[colIdx - targetVars.Length];
-      // var testDataTable = new DataTable(latentVar + " prediction (test)");
-      // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
-      // var emptyRow = new DataRow(latentVar);
-      // testDataTable.Rows.Add(emptyRow);
-      // testDataTable.Rows.Add(predictedValuesRow);
-      // testList.Add(testDataTable);
-    }
-  }
-
-  results["Prediction (training)"].Value = trainingList.AsReadOnly();
-  results["Prediction (test)"].Value = testList.AsReadOnly();
-
+  // only if there is a non-empty test partition
+  if (ProblemData.TestIndices.Any()) {
+    // TODO: DRY for training and test
+
+    var testList = new ItemList<DataTable>();
+    var testRows = ProblemData.TestIndices.ToArray();
+    var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
+    var testPrediction = Integrate(testOptimizationData).ToArray();
+
+    for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
+      // is target variable
+      if (colIdx < targetVars.Length) {
+        var targetVar = targetVars[colIdx];
+        var testDataTable = new DataTable(targetVar + " prediction (test)");
+        var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
+        var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+        testDataTable.Rows.Add(actualValuesRow);
+        testDataTable.Rows.Add(predictedValuesRow);
+        testList.Add(testDataTable);
+
+      } else {
+        // var latentVar = latentVariables[colIdx - targetVars.Length];
+        // var testDataTable = new DataTable(latentVar + " prediction (test)");
+        // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+        // var emptyRow = new DataRow(latentVar);
+        // testDataTable.Rows.Add(emptyRow);
+        // testDataTable.Rows.Add(predictedValuesRow);
+        // testList.Add(testDataTable);
+      }
+    }
+
+    results["Prediction (training)"].Value = trainingList.AsReadOnly();
+    results["Prediction (test)"].Value = testList.AsReadOnly();
+
+  }

   #region simplification of models

@@ -802,5 +829,5 @@
   targetVars,
   latentVariables,
-  TrainingEpisodes,
+  trainingEpisodes,
   OdeSolver,
   NumericIntegrationSteps);

@@ -834,6 +861,11 @@
   var solutionDataset = ((Dataset)problemData.Dataset).ToModifiable();
   var absValues = solutionDataset.GetDoubleValues(name).ToArray();
-  solutionDataset.AddVariable(name + "_diff", absValues.Skip(1).Zip(absValues, (v1, v0) => v1 - v0).Concat(new double[] { 0.0 }).ToList());
+
+  solutionDataset.AddVariable(name + "_diff", CalculateDifferences(absValues, NumericDifferencesSmoothing).ToList());
   var solutionProblemData = new RegressionProblemData(solutionDataset, problemData.AllowedInputVariables, name + "_diff");
+  solutionProblemData.TrainingPartition.Start = problemData.TrainingPartition.Start;
+  solutionProblemData.TrainingPartition.End = problemData.TrainingPartition.End;
+  solutionProblemData.TestPartition.Start = problemData.TestPartition.Start;
+  solutionProblemData.TestPartition.End = problemData.TestPartition.End;
   var solution = model.CreateRegressionSolution(solutionProblemData);
   results.AddOrUpdateResult("Solution " + name, solution);

@@ -1544,13 +1576,13 @@ (whitespace-only change: the block-comment diagram was re-indented; content unchanged)
 *
 * ProblemData
 *      |
 *      V
 * TargetVariables   FunctionSet   MaximumLength   NumberOfLatentVariables
 *      |                |              |                  |
 *      V                V              |                  |
 *    Grammar <---------------+-------------------
 *      |
 *      V
 *   Encoding
 */
 private void RegisterEventHandlers() {

@@ -1587,3 +1619,4 @@
 private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
   TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
+  UpdateGrammarAndEncoding();
 }

@@ -1614,4 +1647,18 @@
 #region helper

+private static double[] CalculateDifferences(double[] targetValues, double numericDifferencesSmoothing) {
+  var x = Enumerable.Range(0, targetValues.Length).Select(i => (double)i).ToArray();
+  alglib.spline1dfitpenalized(x, targetValues, targetValues.Length / 2, numericDifferencesSmoothing,
+    out int info, out alglib.spline1dinterpolant s, out alglib.spline1dfitreport rep);
+  if (info <= 0) throw new ArgumentException("There was a problem while smoothing numeric differences. Try to use a different smoothing parameter value.");
+
+  double[] dy = new double[x.Length];
+  for (int i = 0; i < x.Length; i++) {
+    double xi = x[i];
+    alglib.spline1ddiff(s, xi, out double y, out double dyi, out double d2y);
+    dy[i] = dyi;
+  }
+  return dy;
+}

 private void InitAllParameters() {

@@ -1797,13 +1844,12 @@


-#region Import & Export
-public void Load(IRegressionProblemData data) {
-  Name = data.Name;
-  Description = data.Description;
-  ProblemData = data;
-}
-
-public IRegressionProblemData Export() {
-  return ProblemData;
+#region Import
+public void Load(Problem problem) {
+  // transfer parameter values from problem parameter
+  this.ProblemData = problem.ProblemData;
+  this.TrainingEpisodesParameter.Value = problem.TrainingEpisodesParameter.Value;
+  this.TargetVariablesParameter.Value = problem.TargetVariablesParameter.Value;
+  this.Name = problem.Name;
+  this.Description = problem.Description;
 }
 #endregion
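The core change of this revision replaces the naive first differences (t1 - t0) used for pre-tuning with derivatives of a penalized smoothing spline (ALGLIB's spline1dfitpenalized / spline1ddiff), controlled by the new "Numeric differences smoothing" parameter. Below is a self-contained sketch of the same technique; the ALGLIB calls mirror CalculateDifferences above, while the helper class name and the usage example are illustrative assumptions:

    using System;
    using System.Linq;

    // Sketch: estimate dy/dt of a sampled series by fitting a penalized
    // smoothing spline and evaluating its first derivative at every sample.
    public static class SmoothedDifferences {
      public static double[] Calculate(double[] values, double smoothing) {
        // x is the row index, so derivatives are "per row"; divide by the
        // sampling interval to get derivatives with respect to time.
        var x = Enumerable.Range(0, values.Length).Select(i => (double)i).ToArray();
        // smoothing (rho) roughly in [-8, 8]: very low values for noise-free
        // data, higher values smooth more aggressively (default here is 2).
        alglib.spline1dfitpenalized(x, values, values.Length / 2, smoothing,
          out int info, out alglib.spline1dinterpolant s, out alglib.spline1dfitreport rep);
        if (info <= 0) throw new ArgumentException("Smoothing failed; try a different smoothing value.");
        var dy = new double[x.Length];
        for (int i = 0; i < x.Length; i++) {
          // evaluate the fitted spline's first derivative at sample i
          alglib.spline1ddiff(s, x[i], out double y, out double dyi, out double d2y);
          dy[i] = dyi;
        }
        return dy;
      }
    }

    // Example (assumed): derivatives of noisy sin(0.1*i) should track cos.
    // var rnd = new Random(0);
    // var noisy = Enumerable.Range(0, 100)
    //   .Select(i => Math.Sin(0.1 * i) + 0.01 * rnd.NextDouble()).ToArray();
    // var dy = SmoothedDifferences.Calculate(noisy, 2.0); // ~0.1 * cos(0.1 * i)

Because the spline derivative is defined at every sample point, the pre-tuning episodes no longer need to drop their last row, which is why the adjusted episodes in the diff change from IntRange(ep.Start, ep.End - 1) to IntRange(ep.Start, ep.End).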