Changeset 16126 for branches/2925_AutoDiffForDynamicalModels
- Timestamp:
- 09/06/18 13:27:50 (6 years ago)
- Location:
- branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files:
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/HeuristicLab.Problems.DynamicalSystemsModelling-3.3.csproj
r15968 r16126 180 180 <Private>False</Private> 181 181 </ProjectReference> 182 <ProjectReference Include="..\..\HeuristicLab.Problems.DataAnalysis.Symbolic\3.4\HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.csproj"> 183 <Project>{3D28463F-EC96-4D82-AFEE-38BE91A0CA00}</Project> 184 <Name>HeuristicLab.Problems.DataAnalysis.Symbolic-3.4</Name> 185 </ProjectReference> 182 186 <ProjectReference Include="..\..\HeuristicLab.Problems.DataAnalysis\3.4\HeuristicLab.Problems.DataAnalysis-3.4.csproj"> 183 187 <Project>{DF87C13E-A889-46FF-8153-66DCAA8C5674}</Project> -
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r15971 r16126 34 34 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 35 35 using HeuristicLab.Problems.DataAnalysis; 36 using HeuristicLab.Problems.DataAnalysis.Symbolic; 36 37 using HeuristicLab.Problems.Instances; 37 38 … … 248 249 alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state); 249 250 alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations); 250 alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, 251 alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, 251 252 new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows, NumericIntegrationSteps, latentVariables }); //TODO: create a type 252 253 alglib.minlbfgsresults(state, out optTheta, out report); … … 284 285 double[] grad = new double[optTheta.Length]; 285 286 double optQuality = double.NaN; 286 EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, 287 EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, 287 288 new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows, NumericIntegrationSteps, latentVariables }); 288 289 if (double.IsNaN(optQuality) || double.IsInfinity(optQuality)) return 10E6; // return a large value (TODO: be consistent by using NMSE) … … 383 384 trainingRows, 384 385 nodeIdx, 385 optTheta, 386 optTheta, 386 387 NumericIntegrationSteps).ToArray(); 387 388 … … 407 408 testRows, 408 409 nodeIdx, 409 optTheta, 410 optTheta, 410 411 NumericIntegrationSteps).ToArray(); 411 412 … … 422 423 results["Prediction (training)"].Value = trainingList.AsReadOnly(); 423 424 results["Prediction (test)"].Value = testList.AsReadOnly(); 424 results["Models"].Value = new ItemList<ISymbolicExpressionTree>(trees).AsReadOnly(); // TODO: simplify trees 425 426 var modelList = new ItemList<ISymbolicExpressionTree>(); 427 foreach (var tree in trees) { 428 var shownTree = (ISymbolicExpressionTree)tree.Clone(); 429 var constantsNodeOrig = 
tree.IterateNodesPrefix().Where(IsConstantNode); 430 var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode); 431 432 foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) { 433 double constantsVal = optTheta[nodeIdx[n.original]]; 434 435 ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal }; 436 437 var parentNode = n.shown.Parent; 438 int replacementIndex = parentNode.IndexOfSubtree(n.shown); 439 parentNode.RemoveSubtree(replacementIndex); 440 parentNode.InsertSubtree(replacementIndex, replacementNode); 441 } 442 // TODO: simplify trees 443 444 modelList.Add(shownTree); 445 } 446 results["Models"].Value = modelList.AsReadOnly(); 425 447 } 426 448 … … 613 635 private void OnProblemDataChanged() { 614 636 UpdateTargetVariables(); // implicitly updates other dependent parameters 615 637 UpdateGrammarAndEncoding(); 616 638 var handler = ProblemDataChanged; 617 639 if (handler != null) handler(this, EventArgs.Empty);
Note: See TracChangeset for help on using the changeset viewer.