Changeset 15968 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs

Timestamp: 06/20/18 14:33:02 (6 years ago)
Location:  branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
Files:     2 edited
Legend: unchanged lines are shown without a marker, added lines are prefixed with "+", removed lines with "-"; "…" marks unchanged regions elided by the diff viewer.
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
    Property svn:ignore set to: Plugin.cs
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r15964 → r15968

  using System.Diagnostics;
  using System.Linq;
+ using HeuristicLab.Analysis;
+ using HeuristicLab.Collections;
  using HeuristicLab.Common;
  using HeuristicLab.Core;
+ using HeuristicLab.Data;
  using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
+ using HeuristicLab.Optimization;
  using HeuristicLab.Parameters;
  using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
- public sealed class Problem : SymbolicExpressionTreeProblem, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
+ public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {

    #region parameter names
-   private const string ProblemDataParameterName = "ProblemData";
+   private const string ProblemDataParameterName = "Data";
+   private const string TargetVariablesParameterName = "Target variables";
+   private const string FunctionSetParameterName = "Function set";
+   private const string MaximumLengthParameterName = "Size limit";
+   private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    #endregion
…
    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
+   }
+   public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> TargetVariablesParameter {
+     get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[TargetVariablesParameterName]; }
+   }
+   public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> FunctionSetParameter {
+     get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[FunctionSetParameterName]; }
+   }
+   public IFixedValueParameter<IntValue> MaximumLengthParameter {
+     get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
+   }
+   public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
+     get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    #endregion
…
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }
-   #endregion
+
+   public ReadOnlyCheckedItemCollection<StringValue> TargetVariables {
+     get { return TargetVariablesParameter.Value; }
+   }
+
+   public ReadOnlyCheckedItemCollection<StringValue> FunctionSet {
+     get { return FunctionSetParameter.Value; }
+   }
+
+   public int MaximumLength {
+     get { return MaximumLengthParameter.Value.Value; }
+   }
+   public int MaximumParameterOptimizationIterations {
+     get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
+   }
+   #endregion

    public event EventHandler ProblemDataChanged;
…
    public Problem()
      : base() {
+     var targetVariables = new CheckedItemCollection<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
+     var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system", new RegressionProblemData()));
-
-     // TODO: support multiple target variables
-
-     var g = new SimpleSymbolicExpressionGrammar(); // empty grammar is replaced in UpdateGrammar()
-     base.Encoding = new SymbolicExpressionTreeEncoding(g, 10, 5); // small for testing
-
-     UpdateGrammar();
+     Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
+     Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
+     Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression", new IntValue(20)));
+     Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS)", new IntValue(100)));
+
      RegisterEventHandlers();
-   }
-
-
-   public override double Evaluate(ISymbolicExpressionTree tree, IRandom random) {
+     InitAllParameters();
+   }
+
+
+   public override double Evaluate(Individual individual, IRandom random) {
+     var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
+
      var problemData = ProblemData;
      var rows = ProblemData.TrainingIndices.ToArray();
-     var target = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
+     var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+     var targetValues = new double[rows.Length, targetVars.Length];
+
+     // collect values of all target variables
+     var colIdx = 0;
+     foreach (var targetVar in targetVars) {
+       int rowIdx = 0;
+       foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
+         targetValues[rowIdx, colIdx] = value;
+         rowIdx++;
+       }
+       colIdx++;
+     }

      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
-
-     foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
-       nodeIdx.Add(node, nodeIdx.Count);
+
+     foreach (var tree in trees) {
+       foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
+         nodeIdx.Add(node, nodeIdx.Count);
+       }
      }
…
      alglib.minlbfgsreport report;
      alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
-     alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, 100);
-     alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, new object[] { tree, problemData, nodeIdx });
+     alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
+     alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows }); // TODO: create a type
      alglib.minlbfgsresults(state, out optTheta, out report);
…
      double[] grad = new double[optTheta.Length];
      double optQuality = double.NaN;
-     EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, new object[] { tree, problemData, nodeIdx });
+     EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows });
      if (double.IsNaN(optQuality) || double.IsInfinity(optQuality)) return 10E6; // return a large value (TODO: be consistent by using NMSE)
-     // TODO: write back values
+
+     individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
      return optQuality;
    }
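For reference, the alglib calling convention used in Evaluate() above (minlbfgscreate / minlbfgssetcond / minlbfgsoptimize / minlbfgsresults plus a gradient callback of the form (double[] x, ref double f, double[] grad, object obj)) looks as follows in isolation. This is a minimal sketch minimizing a toy quadratic, not part of the changeset; it assumes the ALGLIB .NET package is referenced, and the names LbfgsSketch and Objective are hypothetical.

    // Minimal L-BFGS sketch using the same alglib calls as Evaluate() above.
    using System;

    public static class LbfgsSketch {
      // same delegate shape as EvaluateObjectiveAndGradient: f and grad are written in place
      private static void Objective(double[] x, ref double f, double[] grad, object obj) {
        // f(x) = (x0 - 3)^2 + (x1 + 1)^2, gradient = (2(x0 - 3), 2(x1 + 1))
        f = (x[0] - 3) * (x[0] - 3) + (x[1] + 1) * (x[1] + 1);
        grad[0] = 2 * (x[0] - 3);
        grad[1] = 2 * (x[1] + 1);
      }

      public static void Main() {
        var theta = new double[] { 0.0, 0.0 };                              // initial guess
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport report;
        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state); // L-BFGS memory rank
        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, 100);                  // stop after at most 100 iterations
        alglib.minlbfgsoptimize(state, Objective, null, null);
        double[] optTheta;
        alglib.minlbfgsresults(state, out optTheta, out report);
        Console.WriteLine("optimum: ({0}, {1})", optTheta[0], optTheta[1]); // approx. (3, -1)
      }
    }

The only difference in the changeset is that the obj argument carries the trees, target values and node index so that the static callback can reconstruct the objective.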

    private static void EvaluateObjectiveAndGradient(double[] x, ref double f, double[] grad, object obj) {
-     var tree = (ISymbolicExpressionTree)((object[])obj)[0];
-     var problemData = (IRegressionProblemData)((object[])obj)[1];
-     var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[2];
-
+     var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
+     var targetVariables = (string[])((object[])obj)[1];
+     var problemData = (IRegressionProblemData)((object[])obj)[2];
+     var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[3];
+     var targetValues = (double[,])((object[])obj)[4];
+     var rows = (int[])((object[])obj)[5];

      var predicted = Integrate(
-       new[] { tree }, // we assume tree contains an expression for the change of the target variable over time y'(t)
+       trees, // we assume trees contain expressions for the change of each target variable over time y'(t)
        problemData.Dataset,
        problemData.AllowedInputVariables.ToArray(),
-       new[] { problemData.TargetVariable },
-       problemData.TrainingIndices,
-       nodeIdx,
+       targetVariables,
+       rows,
+       nodeIdx, // TODO: is it Ok to use rows here ?
        x).ToArray();

-     // objective function is MSE
+
+     // for normalized MSE = 1/variance(t) * MSE(t, pred)
+     var invVar = Enumerable.Range(0, targetVariables.Length)
+       .Select(c => rows.Select(row => targetValues[row, c])) // colums vectors
+       .Select(vec => vec.Variance())
+       .Select(v => 1.0 / v)
+       .ToArray();
+
+     // objective function is NMSE
      f = 0.0;
      int n = predicted.Length;
      double invN = 1.0 / n;
      var g = Vector.Zero;
-     foreach (var pair in predicted.Zip(problemData.TargetVariableTrainingValues, Tuple.Create)) {
-       var y_pred = pair.Item1;
-       var y = pair.Item2;
-
-       var res = (y - y_pred.Item1);
-       var ressq = res * res;
-       f += ressq * invN;
-       g += -2.0 * res * y_pred.Item2 * invN;
+     int r = 0;
+     foreach (var y_pred in predicted) {
+       // TODO NMSE to put the same weight on each target regardless of the value range;
+       for (int c = 0; c < y_pred.Length; c++) {
+
+         var y_pred_f = y_pred[c].Item1;
+         var y = targetValues[r, c];
+
+         var res = (y - y_pred_f);
+         var ressq = res * res;
+         f += ressq * invN * invVar[c];
+         g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c];
+       }
+       r++;
      }

…
    }
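The objective above is a normalized MSE: per target variable c the squared residuals are scaled by 1/Var(y_c), so every target contributes on a comparable scale regardless of its value range, and the gradient term picks up the same 1/(n * Var) factor through the chain rule (the y_pred[c].Item2 gradient Vector). A framework-free sketch of the same value computation, without the gradient bookkeeping; the class and method names are hypothetical and Variance() is assumed to mean the sample variance:

    // Normalized MSE over multiple targets, mirroring the loop in EvaluateObjectiveAndGradient.
    using System;
    using System.Linq;

    public static class NmseSketch {
      public static double Nmse(double[,] y, double[,] yPred) {
        int n = y.GetLength(0), targets = y.GetLength(1);
        double f = 0.0, invN = 1.0 / n;
        for (int c = 0; c < targets; c++) {
          var col = Enumerable.Range(0, n).Select(r => y[r, c]).ToArray();
          double mean = col.Average();
          double variance = col.Select(v => (v - mean) * (v - mean)).Sum() / (n - 1); // sample variance
          double invVar = 1.0 / variance;
          for (int r = 0; r < n; r++) {
            double res = y[r, c] - yPred[r, c];
            f += res * res * invN * invVar; // each target weighted by 1/variance
          }
        }
        return f;
      }

      public static void Main() {
        var y     = new double[,] { { 1.0, 100 }, { 2.0, 200 }, { 3.0, 300 } };
        var yPred = new double[,] { { 1.1, 110 }, { 2.1, 210 }, { 2.9, 290 } };
        Console.WriteLine(Nmse(y, yPred)); // both targets contribute on a comparable scale
      }
    }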
-
-   private static IEnumerable<Tuple<double, Vector>> Integrate(
+   public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
+     base.Analyze(individuals, qualities, results, random);
+
+     if (!results.ContainsKey("Prediction (training)")) {
+       results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
+     }
+     if (!results.ContainsKey("Prediction (test)")) {
+       results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
+     }
+     if (!results.ContainsKey("Models")) {
+       results.Add(new Result("Models", typeof(ReadOnlyItemList<ISymbolicExpressionTree>)));
+     }
+
+     // TODO extract common functionality from Evaluate and Analyze
+     var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
+     var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see evaluate
+     var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
+     var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
+
+
+     foreach (var tree in trees) {
+       foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
+         nodeIdx.Add(node, nodeIdx.Count);
+       }
+     }
+     var problemData = ProblemData;
+     var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+
+     var trainingList = new ItemList<DataTable>();
+     var trainingRows = ProblemData.TrainingIndices.ToArray();
+     var trainingPrediction = Integrate(
+       trees, // we assume trees contain expressions for the change of each target variable over time y'(t)
+       problemData.Dataset,
+       problemData.AllowedInputVariables.ToArray(),
+       targetVars,
+       trainingRows,
+       nodeIdx,
+       optTheta).ToArray();
+
+     for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
+       var targetVar = targetVars[colIdx];
+       var trainingDataTable = new DataTable(targetVar + " prediction (training)");
+       var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
+       var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+       trainingDataTable.Rows.Add(actualValuesRow);
+       trainingDataTable.Rows.Add(predictedValuesRow);
+       trainingList.Add(trainingDataTable);
+     }
+
+     // TODO: DRY for training and test
+     var testList = new ItemList<DataTable>();
+     var testRows = ProblemData.TestIndices.ToArray();
+     var testPrediction = Integrate(
+       trees, // we assume trees contain expressions for the change of each target variable over time y'(t)
+       problemData.Dataset,
+       problemData.AllowedInputVariables.ToArray(),
+       targetVars,
+       testRows,
+       nodeIdx,
+       optTheta).ToArray();
+
+     for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
+       var targetVar = targetVars[colIdx];
+       var testDataTable = new DataTable(targetVar + " prediction (test)");
+       var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
+       var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+       testDataTable.Rows.Add(actualValuesRow);
+       testDataTable.Rows.Add(predictedValuesRow);
+       testList.Add(testDataTable);
+     }
+
+     results["Prediction (training)"].Value = trainingList.AsReadOnly();
+     results["Prediction (test)"].Value = testList.AsReadOnly();
+     results["Models"].Value = new ItemList<ISymbolicExpressionTree>(trees).AsReadOnly();
+   }
+
+
+   #region interpretation
+   private static IEnumerable<Tuple<double, Vector>[]> Integrate(
      ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, IEnumerable<int> rows,
      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx, double[] parameterValues) {
…

      // return first value as stored in the dataset
-     yield return Tuple.Create(dataset.GetDoubleValue(targetVariables.First(), rows.First()), Vector.Zero);
+
+     yield return targetVariables
+       .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
+       .ToArray();

      // integrate forward starting with known values for the target in t0
…
      }

-     // yield target values
-     foreach (var varName in targetVariables) {
-       yield return variableValues[varName];
-     }
+     yield return targetVariables
+       .Select(targetVar => variableValues[targetVar])
+       .ToArray();

      // update for next time step
…
        }
      }
      case "*": {
-       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
+       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
        var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);
…

      case "-": {
-       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
+       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
        var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);
…
      }
      case "%": {
-       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
+       var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
        var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);
…
      }
    }
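Integrate seeds the state with the first dataset row and then steps the coupled system forward, evaluating one tree per target variable as its right-hand side y'(t); the update step itself is elided in this changeset view and presumably follows an explicit Euler-style scheme, with each value paired with a gradient Vector for the θ parameters in the actual code. A minimal, framework-free sketch of that forward-integration idea (values only; the delegate array stands in for the interpreted trees, and step size and scheme are assumptions):

    // Forward integration of a coupled system y'(t) = f(y(t)), values only.
    using System;
    using System.Collections.Generic;
    using System.Linq;

    public static class IntegrateSketch {
      public static IEnumerable<double[]> Integrate(Func<double[], double>[] dydt, double[] y0, int steps, double h) {
        var y = (double[])y0.Clone();
        yield return (double[])y.Clone();                        // first value as stored in the dataset
        for (int t = 0; t < steps; t++) {
          var change = dydt.Select(f => f(y)).ToArray();         // evaluate one expression per target variable
          for (int i = 0; i < y.Length; i++) y[i] += h * change[i]; // explicit Euler update (assumption)
          yield return (double[])y.Clone();
        }
      }

      public static void Main() {
        // toy predator-prey-like system with two target variables
        Func<double[], double>[] rhs = {
          y => 0.1 * y[0] - 0.02 * y[0] * y[1],
          y => 0.01 * y[0] * y[1] - 0.2 * y[1]
        };
        foreach (var state in Integrate(rhs, new[] { 40.0, 9.0 }, steps: 5, h: 1.0))
          Console.WriteLine(string.Join(", ", state));
      }
    }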

    #region events
+   /*
+    * Dependencies between parameters:
+    *
+    * ProblemData
+    *    |
+    *    V
+    * TargetVariables   FunctionSet   MaximumLength
+    *    |                  |              |
+    *    V                  V              |
+    * Grammar <---------------+------------+
+    *    |
+    *    V
+    * Encoding
+    */
    private void RegisterEventHandlers() {
-     ProblemDataParameter.ValueChanged += new EventHandler(ProblemDataParameter_ValueChanged);
-     if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += new EventHandler(ProblemData_Changed);
+     ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
+     if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
+
+     TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
+     if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
+
+     FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
+     if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
+
+     MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
+   }
+
+   private void MaximumLengthChanged(object sender, EventArgs e) {
+     UpdateGrammarAndEncoding();
+   }
+
+   private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
+     FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
+   }
+
+   private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
+     UpdateGrammarAndEncoding();
+   }
+
+   private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
+     TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
+   }
+
+   private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
+     UpdateGrammarAndEncoding();
    }

    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
-     ProblemDataParameter.Value.Changed += new EventHandler(ProblemData_Changed);
+     ProblemDataParameter.Value.Changed += ProblemData_Changed;
      OnProblemDataChanged();
      OnReset();
…

    private void ProblemData_Changed(object sender, EventArgs e) {
+     OnProblemDataChanged();
      OnReset();
    }

    private void OnProblemDataChanged() {
-     UpdateGrammar();
+     UpdateTargetVariables(); // implicitly updates other dependent parameters

      var handler = ProblemDataChanged;
…
    }

-   private void UpdateGrammar() {
+   #endregion
+
+   #region helper
+
+   private void InitAllParameters() {
+     UpdateTargetVariables(); // implicitly updates the grammar and the encoding
+   }
+
+   private ReadOnlyCheckedItemCollection<StringValue> CreateFunctionSet() {
+     var l = new CheckedItemCollection<StringValue>();
+     l.Add(new StringValue("+").AsReadOnly());
+     l.Add(new StringValue("*").AsReadOnly());
+     l.Add(new StringValue("%").AsReadOnly());
+     l.Add(new StringValue("-").AsReadOnly());
+     return l.AsReadOnly();
+   }
+
+   private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
+     return n.Symbol.Name.StartsWith("θ");
+   }
+
+
+   private void UpdateTargetVariables() {
+     var currentlySelectedVariables = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+
+     var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
+     var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
+     foreach (var matchingItem in matchingItems) {
+       newVariablesList.SetItemCheckedState(matchingItem, true);
+     }
+     TargetVariablesParameter.Value = newVariablesList;
+   }
+
+   private void UpdateGrammarAndEncoding() {
+     var encoding = new MultiEncoding();
+     var g = CreateGrammar();
+     foreach (var targetVar in TargetVariables.CheckedItems) {
+       encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
+     }
+     Encoding = encoding;
+   }
+
+   private ISymbolicExpressionGrammar CreateGrammar() {
      // whenever ProblemData is changed we create a new grammar with the necessary symbols
      var g = new SimpleSymbolicExpressionGrammar();
-     g.AddSymbols(new[] {
-       "+",
-       "*",
-       // "%", // % is protected division 1/0 := 0 // removed for testing
-       "-",
-     }, 2, 2);
+     g.AddSymbols(FunctionSet.CheckedItems.Select(i => i.Value).ToArray(), 2, 2);

      // TODO
…
      //}, 1, 1);

-     foreach (var variableName in ProblemData.AllowedInputVariables)
-       g.AddTerminalSymbol(variableName);
-     foreach (var variableName in new string[] { ProblemData.TargetVariable }) // TODO: multiple target variables
+     foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
        g.AddTerminalSymbol(variableName);

      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees

      var numericConstantsFactor = 2.0;
-     for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + 1); i++) {
+     for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
      }
-     Encoding.Grammar = g;
-   }
+     return g;
+   }
+
    #endregion

…
    #endregion

-
-   #region helper
-
-   private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
-     return n.Symbol.Name.StartsWith("θ");
-   }
-
-   #endregion
-
  }
}
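The function set created by CreateFunctionSet() is { "+", "*", "%", "-" }, where "%" is protected division (1/0 := 0, per the comment removed from the old grammar code); the case bodies of InterpretRec are largely elided in this changeset view. A minimal, framework-free sketch of such a recursive evaluator over a toy node type, values only, without the Vector gradients or the θ parameter lookup of the real InterpretRec (the Node and InterpreterSketch names are hypothetical):

    // Recursive evaluation of a binary expression tree over the function set { "+", "*", "-", "%" }.
    using System;

    public sealed class Node {
      public string Symbol;    // "+", "*", "-", "%", or a terminal name
      public Node Left, Right; // null for terminals
      public double Value;     // value of a terminal (variable or constant)
    }

    public static class InterpreterSketch {
      public static double Interpret(Node node) {
        switch (node.Symbol) {
          case "+": return Interpret(node.Left) + Interpret(node.Right);
          case "*": return Interpret(node.Left) * Interpret(node.Right);
          case "-": return Interpret(node.Left) - Interpret(node.Right);
          case "%": {                      // protected division: 1/0 := 0
            var r = Interpret(node.Right);
            return r == 0.0 ? 0.0 : Interpret(node.Left) / r;
          }
          default: return node.Value;      // terminal symbol
        }
      }

      public static void Main() {
        // (x - y) % y with x = 3, y = 2
        var tree = new Node {
          Symbol = "%",
          Left = new Node { Symbol = "-", Left = new Node { Symbol = "x", Value = 3 }, Right = new Node { Symbol = "y", Value = 2 } },
          Right = new Node { Symbol = "y", Value = 2 }
        };
        Console.WriteLine(Interpret(tree)); // 0.5
      }
    }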
Note: See TracChangeset for help on using the changeset viewer.