Changeset 16399
- Timestamp: 12/19/18 07:43:36
- Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files: 4 added, 2 edited
Legend:
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
- Unmodified context lines are unprefixed; "…" marks elided lines
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/HeuristicLab.Problems.DynamicalSystemsModelling-3.3.csproj
r16268 → r16399

  …
      <HintPath>..\..\bin\ALGLIB-3.7.0.dll</HintPath>
    </Reference>
+   <Reference Include="Microsoft.CSharp" />
    <Reference Include="System" />
    <Reference Include="System.Core">
  …
    </Reference>
    <Reference Include="System.Drawing" />
+   <Reference Include="System.Windows.Forms" />
  </ItemGroup>
  <ItemGroup>
  …
    <Compile Include="Problem.cs" />
    <Compile Include="Properties\AssemblyInfo.cs" />
+   <Compile Include="Solution.cs" />
+   <Compile Include="SolutionView.cs">
+     <SubType>UserControl</SubType>
+   </Compile>
+   <Compile Include="SolutionView.Designer.cs">
+     <DependentUpon>SolutionView.cs</DependentUpon>
+   </Compile>
    <Compile Include="Vector.cs" />
  </ItemGroup>
  …
      <Name>HeuristicLab.Algorithms.DataAnalysis-3.4</Name>
    </ProjectReference>
+   <ProjectReference Include="..\..\HeuristicLab.Analysis.Views\3.3\HeuristicLab.Analysis.Views-3.3.csproj">
+     <Project>{76945D76-CA61-4147-9DC2-0ACDCDDF87F9}</Project>
+     <Name>HeuristicLab.Analysis.Views-3.3</Name>
+   </ProjectReference>
    <ProjectReference Include="..\..\HeuristicLab.Analysis\3.3\HeuristicLab.Analysis-3.3.csproj">
      <Project>{887425B4-4348-49ED-A457-B7D2C26DDBF9}</Project>
  …
      <Private>False</Private>
    </ProjectReference>
+   <ProjectReference Include="..\..\HeuristicLab.Core.Views\3.3\HeuristicLab.Core.Views-3.3.csproj">
+     <Project>{E226881D-315F-423D-B419-A766FE0D8685}</Project>
+     <Name>HeuristicLab.Core.Views-3.3</Name>
+   </ProjectReference>
    <ProjectReference Include="..\..\HeuristicLab.Core\3.3\HeuristicLab.Core-3.3.csproj">
      <Project>{C36BD924-A541-4A00-AFA8-41701378DDC5}</Project>
  …
      <Private>False</Private>
    </ProjectReference>
+   <ProjectReference Include="..\..\HeuristicLab.Data.Views\3.3\HeuristicLab.Data.Views-3.3.csproj">
+     <Project>{72104A0B-90E7-42F3-9ABE-9BBBADD4B943}</Project>
+     <Name>HeuristicLab.Data.Views-3.3</Name>
+   </ProjectReference>
    <ProjectReference Include="..\..\HeuristicLab.Data\3.3\HeuristicLab.Data-3.3.csproj">
      <Project>{BBAB9DF5-5EF3-4BA8-ADE9-B36E82114937}</Project>
  …
      <Name>HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4</Name>
      <Private>False</Private>
+   </ProjectReference>
+   <ProjectReference Include="..\..\HeuristicLab.MainForm.WindowsForms\3.3\HeuristicLab.MainForm.WindowsForms-3.3.csproj">
+     <Project>{AB687BBE-1BFE-476B-906D-44237135431D}</Project>
+     <Name>HeuristicLab.MainForm.WindowsForms-3.3</Name>
+   </ProjectReference>
+   <ProjectReference Include="..\..\HeuristicLab.MainForm\3.3\HeuristicLab.MainForm-3.3.csproj">
+     <Project>{3BD61258-31DA-4B09-89C0-4F71FEF5F05A}</Project>
+     <Name>HeuristicLab.MainForm-3.3</Name>
    </ProjectReference>
    <ProjectReference Include="..\..\HeuristicLab.Operators\3.3\HeuristicLab.Operators-3.3.csproj">
  …
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </Content>
+ </ItemGroup>
+ <ItemGroup>
+   <EmbeddedResource Include="SolutionView.resx">
+     <DependentUpon>SolutionView.cs</DependentUpon>
+   </EmbeddedResource>
  </ItemGroup>
  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r16398 → r16399

  …
    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
      // write back optimized parameters to tree nodes instead of the separate OptTheta variable
      // retreive optimized parameters from nodes?

+     var problemData = ProblemData;
+     var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
+     var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
  …
          double[] optTheta;
          double nmse;
-         OptimizeForEpisodes(trees, random, new[] { episode }, out optTheta, out nmse);
+         OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
  …
      double[] optTheta;
      double nmse;
-     OptimizeForEpisodes(trees, random, TrainingEpisodes, out optTheta, out nmse);
+     OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
      individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
      return nmse;
  …
    }

-   private void OptimizeForEpisodes(
+   public static void OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
+     IRegressionProblemData problemData,
+     string[] targetVars,
+     string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
+     int maxParameterOptIterations,
+     int numericIntegrationSteps,
+     string odeSolver,
      out double[] optTheta,
      out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
-     var problemData = ProblemData;
-     var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
-     var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      var targetValues = new double[rows.Length, targetVars.Length];
  …
      alglib.minlbfgsreport report;
      alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
-     alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
+     alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
      //alglib.minlbfgssetgradientcheck(state, 1e-6);
      alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
-       new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver }); //TODO: create a type
+       new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }); //TODO: create a type

      alglib.minlbfgsresults(state, out optTheta, out report);
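The hunk above makes OptimizeForEpisodes static and threads the optimization settings (iteration limit, integration steps, ODE solver) through as explicit arguments to the ALGLIB L-BFGS calls. For reference, the ALGLIB call sequence used here is create → setcond → optimize → results, with an objective delegate that receives the current point, a ref objective value, a gradient array, and a user object. The following minimal sketch of that pattern is not part of the changeset; the class, method, and objective names are illustrative, and the example simply minimizes a quadratic.

    using System;

    // Minimal sketch of the ALGLIB minlbfgs usage pattern seen in the hunk above
    // (illustrative only, not code from the changeset).
    public static class LbfgsSketch {
      public static double[] Minimize(double[] x0, int maxIterations) {
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport report;
        alglib.minlbfgscreate(Math.Min(x0.Length, 5), x0, out state);  // limited-memory size m, starting point
        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxIterations);   // stop on iteration count only, as in the hunk
        alglib.minlbfgsoptimize(state, Objective, null, null);         // delegate has the same shape as EvaluateObjectiveAndGradient
        double[] xOpt;
        alglib.minlbfgsresults(state, out xOpt, out report);
        return xOpt;
      }

      // Objective and analytic gradient for f(x) = (x0 - 3)^2 + (x1 + 1)^2.
      private static void Objective(double[] x, ref double f, double[] grad, object obj) {
        f = (x[0] - 3) * (x[0] - 3) + (x[1] + 1) * (x[1] + 1);
        grad[0] = 2 * (x[0] - 3);
        grad[1] = 2 * (x[1] + 1);
      }
    }

Calling LbfgsSketch.Minimize(new double[] { 0, 0 }, 100) should return a point close to (3, -1).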
  …
      nmse = double.NaN;
      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
-       new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver });
+       new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver });
      if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
    }
  …
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
+     }
+     if (!results.ContainsKey("Solution")) {
+       results.Add(new Result("Solution", typeof(Solution)));
      }

  …
      results["Prediction (training)"].Value = trainingList.AsReadOnly();
      results["Prediction (test)"].Value = testList.AsReadOnly();
+
+
      #region simplification of models
      // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
      var models = new VariableCollection(); // to store target var names and original version of tree

+     var optimizedTrees = new List<ISymbolicExpressionTree>();
      int nextParIdx = 0;
+     for (int idx = 0; idx < trees.Length; idx++) {
+       var tree = trees[idx];
+       optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
+     }
+     var ds = problemData.Dataset;
+     var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
+     var allVarNames = ds.DoubleVariables.Concat(newVarNames);
+     var newVarValues = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
+     var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
+       .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
+       .ToList();
+     var newDs = new Dataset(allVarNames, allVarValues);
+     var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarValues).ToArray(), problemData.TargetVariable);
+     results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
+       // optTheta,
+       newProblemData,
+       targetVars,
+       latentVariables,
+       TrainingEpisodes,
+       OdeSolver,
+       NumericIntegrationSteps);
+
+
+     nextParIdx = 0;
      for (int idx = 0; idx < trees.Length; idx++) {
        var varName = string.Empty;
  …

      }
+
      results["Models"].Value = models;
      #endregion
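The new "Solution" result above rewrites each optimized parameter into a pseudo-variable c_i and stores the corresponding constant value as an extra column in a copy of the dataset, so the translated trees can later be handled like ordinary symbolic regression models. A standalone sketch of that column-appending step is shown below; the class and method names are illustrative (not from the changeset), and it only relies on Dataset members that already appear in the hunk (DoubleVariables, GetDoubleValues, Rows, and the Dataset(names, values) constructor). The namespace using is an assumption about where Dataset lives in the HeuristicLab source tree.

    using System.Collections.Generic;
    using System.Linq;
    using HeuristicLab.Problems.DataAnalysis; // assumed namespace of Dataset

    // Illustrative sketch: append one constant column c_i per optimized parameter
    // value, mirroring the idea used to build newDs in the hunk above.
    public static class ConstantColumnSketch {
      public static Dataset AppendParameterColumns(Dataset ds, double[] optTheta) {
        var names = ds.DoubleVariables.ToList();
        var values = names.Select(n => ds.GetDoubleValues(n).ToList()).ToList();  // existing columns
        for (int i = 0; i < optTheta.Length; i++) {
          names.Add("c_" + i);                                           // new pseudo-variable name
          values.Add(Enumerable.Repeat(optTheta[i], ds.Rows).ToList());  // constant value repeated for every row
        }
        return new Dataset(names, values);
      }
    }

In the changeset itself the new c_i names are additionally appended to the allowed input variables of the new RegressionProblemData.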
  …
      // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes

-   private static IEnumerable<Tuple<double, Vector>[]> Integrate(
+   public static IEnumerable<Tuple<double, Vector>[]> Integrate(
      ISymbolicExpressionTree[] trees, IDataset dataset,
      string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
  …


      foreach (var node in nodeValues.Keys.ToArray()) {
        if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
          // update values for (latent) variables
          var varName = node.Symbol.Name;
  …
    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
-     return n.Symbol.Name.StartsWith("θ");
+     return n.Symbol.Name[0] == 'θ';
    }
    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
-     return n.Symbol.Name.StartsWith("λ");
+     return n.Symbol.Name[0] == 'λ';
    }
    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
  …

      return g;
+   }
+
+
+
+
+
+   private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
+     ISymbolicExpressionTreeNode translatedNode = null;
+     if (n.Symbol is StartSymbol) {
+       translatedNode = new StartSymbol().CreateTreeNode();
+     } else if (n.Symbol is ProgramRootSymbol) {
+       translatedNode = new ProgramRootSymbol().CreateTreeNode();
+     } else if (n.Symbol.Name == "+") {
+       translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
+     } else if (n.Symbol.Name == "-") {
+       translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
+     } else if (n.Symbol.Name == "*") {
+       translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
+     } else if (n.Symbol.Name == "%") {
+       translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
+     } else if (n.Symbol.Name == "sin") {
+       translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
+     } else if (n.Symbol.Name == "cos") {
+       translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
+     } else if (n.Symbol.Name == "sqr") {
+       translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
+     } else if (IsConstantNode(n)) {
+       translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
+       nextParIdx++;
+     } else {
+       translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
+     }
+     foreach (var child in n.Subtrees) {
+       translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
+     }
+     return translatedNode;
    }
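The new FixParameters method rebuilds each tree recursively, keeping the structure but renaming every θ-parameter terminal to a c_i terminal so that the translated trees line up with the constant columns added to the dataset earlier in this changeset. Its long if/else chain is essentially a fixed name-to-arity table; a hypothetical table-driven variant of just the node-translation step is sketched below. It is not part of the changeset and assumes it sits next to FixParameters inside Problem.cs, so that IsConstantNode, the symbol types, and the usual usings (System.Collections.Generic, the symbolic expression tree encoding namespace) are in scope.

    // Hypothetical sketch only: the same symbol-name to arity mapping as the
    // if/else chain in FixParameters, expressed as a lookup table.
    private static readonly Dictionary<string, int> knownArities = new Dictionary<string, int> {
      { "+", 2 }, { "-", 2 }, { "*", 2 }, { "%", 2 },
      { "sin", 1 }, { "cos", 1 }, { "sqr", 1 }
    };

    private static ISymbolicExpressionTreeNode CreateTranslatedNode(ISymbolicExpressionTreeNode n, ref int nextParIdx) {
      if (n.Symbol is StartSymbol) return new StartSymbol().CreateTreeNode();
      if (n.Symbol is ProgramRootSymbol) return new ProgramRootSymbol().CreateTreeNode();
      int arity;
      if (knownArities.TryGetValue(n.Symbol.Name, out arity))
        return new SimpleSymbol(n.Symbol.Name, arity).CreateTreeNode();             // known operator or function
      if (IsConstantNode(n))
        return new SimpleSymbol("c_" + nextParIdx++, 0).CreateTreeNode();           // θ parameter becomes c_i terminal
      return new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();      // variables and anything else
    }

The recursion over n.Subtrees would stay exactly as in FixParameters; only the per-node translation step changes.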