Changeset 16914

Timestamp: 05/07/19 18:36:04 (6 years ago)
Location: branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions
Files: 1 edited, 1 copied
In the diffs below, a leading - marks a line removed in r16914, a leading + marks a line added, unprefixed lines are unchanged context, and … marks elided unchanged regions.
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedConstantOptimizationEvaluator.cs
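Summary of the changes in this file: the copied evaluator is renamed to ConstrainedConstantOptimizationEvaluator (with a new StorableType GUID), its fitness switches from maximized Pearson R² to minimized MSE, and the unconstrained Levenberg-Marquardt fit (alglib.minlm) is replaced by alglib's nonlinearly constrained minnlc/SLP solver. Interval constraints on the model and its partial derivatives are compiled into constraint trees of the form c(x) <= 0 and enforced over the variable ranges via interval arithmetic.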
Diff r16912 → r16914:

…
  namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
-   [Item("Constant Optimization Evaluator (new)", "Calculates Pearson R² of a symbolic regression solution and optimizes the constant used.")]
-   [StorableType("1D5361E9-EF73-47D2-9211-FDD39BBC1018")]
-   public class SymbolicRegressionNewConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
+   [Item("Constant Optimization Evaluator (with constraints)", "")]
+   [StorableType("A8958E06-C54A-4193-862E-8315C86EB5C1")]
+   public class ConstrainedConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
      private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
      private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
…
      public override bool Maximization {
-       get { return true; }
+       get { return false; }
      }

      [StorableConstructor]
-     protected SymbolicRegressionNewConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
-     protected SymbolicRegressionNewConstantOptimizationEvaluator(SymbolicRegressionNewConstantOptimizationEvaluator original, Cloner cloner)
+     protected ConstrainedConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
+     protected ConstrainedConstantOptimizationEvaluator(ConstrainedConstantOptimizationEvaluator original, Cloner cloner)
        : base(original, cloner) {
      }
-     public SymbolicRegressionNewConstantOptimizationEvaluator()
+     public ConstrainedConstantOptimizationEvaluator()
        : base() {
        Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
…
      public override IDeepCloneable Clone(Cloner cloner) {
-       return new SymbolicRegressionNewConstantOptimizationEvaluator(this, cloner);
+       return new ConstrainedConstantOptimizationEvaluator(this, cloner);
      }
…
        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
-         quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
+         quality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, double.MinValue, double.MaxValue, ProblemDataParameter.ActualValue, evaluationRows, applyLinearScaling: false);
        }
…
        } else {
          var evaluationRows = GenerateRowsToEvaluate();
-         quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
+         quality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, double.MinValue, double.MaxValue, ProblemDataParameter.ActualValue, evaluationRows, applyLinearScaling: false);
        }
        QualityParameter.ActualValue = new DoubleValue(quality);
…
        GradientEvaluationsResultParameter.ExecutionContext = context;

-       // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
+       // MSE evaluator is used on purpose instead of the const-opt evaluator,
        // because Evaluate() is used to get the quality of evolved models on
        // different partitions of the dataset (e.g., best validation model)
-       double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
+       double mse = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, double.MinValue, double.MaxValue, problemData, rows, applyLinearScaling: false);

        SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
…
        GradientEvaluationsResultParameter.ExecutionContext = null;

-       return r2;
+       return mse;
      }
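Note that every replaced Calculate call now passes double.MinValue/double.MaxValue instead of the configured estimation limits and fixes applyLinearScaling: false, so model outputs are neither clipped nor linearly scaled before the MSE is computed; Maximization correspondingly flips to false because MSE is minimized.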
…
      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

-     if (!updateVariableWeights) throw new NotSupportedException();
-
-     // // numeric constants in the tree become variables for constant opt
-     // // variables in the tree become parameters (fixed values) for constant opt
-     // // for each parameter (variable in the original tree) we store the
-     // // variable name, variable value (for factor vars) and lag as a DataForVariable object.
-     // // A dictionary is used to find parameters
-     // double[] initialConstants;
-     // var parameters = new List<TreeToAutoDiffTermConverter.DataForVariable>();
-     //
-     // TreeToAutoDiffTermConverter.ParametricFunction func;
-     // TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
-     // if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
-     //   throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
-     // if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
-     // var parameterEntries = parameters.ToArray(); // order of entries must be the same for x
-
-     GetParameterNodes(tree, out List<ISymbolicExpressionTreeNode> thetaNodes, out List<double> thetaValues);
-     var initialConstants = thetaValues.ToArray();
-
-     //extract inital constants
-     double[] c;
-     if (applyLinearScaling) {
-       c = new double[initialConstants.Length + 2];
-       c[0] = 0.0;
-       c[1] = 1.0;
-       Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
-     } else {
-       c = (double[])initialConstants.Clone();
-     }
-
-     double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+     if (!updateVariableWeights) throw new NotSupportedException("not updating variable weights is not supported");
+     if (!updateConstantsInTree) throw new NotSupportedException("not updating tree parameters is not supported");
+     if (applyLinearScaling) throw new NotSupportedException("linear scaling is not supported");
+
+     // we always update constants, so we don't need to calculate initial quality
+     // double originalQuality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);

      if (counter == null) counter = new EvaluationsCounter();
      var rowEvaluationsCounter = new EvaluationsCounter();

-     alglib.minlmstate state;
-     alglib.minlmreport rep;
-
-     IDataset ds = problemData.Dataset;
-     double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
-     int n = y.Length;
-     int k = c.Length;
-
-     var trainRows = problemData.TrainingIndices.ToArray();
-     var parameterNodes = thetaNodes.ToArray();
-     alglib.ndimensional_fvec function_cx_1_func = CreateFunc(tree, new VectorEvaluator(), parameterNodes, ds, problemData.TargetVariable, trainRows);
-     alglib.ndimensional_jac function_cx_1_jac = CreateJac(tree, new VectorAutoDiffEvaluator(), parameterNodes, ds, problemData.TargetVariable, trainRows);
-     alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj);
+     var intervalConstraints = problemData.IntervalConstraints;
+     var dataIntervals = problemData.VariableRanges.VariableIntervals;
+
+     // convert constants to variables named theta...
+     var treeForDerivation = ReplaceConstWithVar(tree, out List<string> thetaNames, out List<double> thetaValues); // copies the tree
+
+     // create trees for relevant derivatives
+     Dictionary<string, ISymbolicExpressionTree> derivatives = new Dictionary<string, ISymbolicExpressionTree>();
+     var allThetaNodes = thetaNames.Select(_ => new List<ConstantTreeNode>()).ToArray();
+     var constraintTrees = new List<ISymbolicExpressionTree>();
+     foreach (var constraint in intervalConstraints.Constraints) {
+       if (constraint.IsDerivation) {
+         if (!problemData.AllowedInputVariables.Contains(constraint.Variable))
+           throw new ArgumentException($"Invalid constraint: the variable {constraint.Variable} does not exist in the dataset.");
+         var df = DerivativeCalculator.Derive(treeForDerivation, constraint.Variable);
+
+         // alglib requires constraint expressions of the form c(x) <= 0
+         // -> we make two expressions, one for the lower bound and one for the upper bound
+
+         if (constraint.Interval.UpperBound < double.PositiveInfinity) {
+           var df_smaller_upper = Subtract((ISymbolicExpressionTree)df.Clone(), CreateConstant(constraint.Interval.UpperBound));
+           // convert variables named theta back to constants
+           var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes);
+           constraintTrees.Add(df_prepared);
+         }
+         if (constraint.Interval.LowerBound > double.NegativeInfinity) {
+           var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone());
+           // convert variables named theta back to constants
+           var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes);
+           constraintTrees.Add(df_prepared);
+         }
+       } else {
+         if (constraint.Interval.UpperBound < double.PositiveInfinity) {
+           var f_smaller_upper = Subtract((ISymbolicExpressionTree)treeForDerivation.Clone(), CreateConstant(constraint.Interval.UpperBound));
+           // convert variables named theta back to constants
+           var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes);
+           constraintTrees.Add(df_prepared);
+         }
+         if (constraint.Interval.LowerBound > double.NegativeInfinity) {
+           var f_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)treeForDerivation.Clone());
+           // convert variables named theta back to constants
+           var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes);
+           constraintTrees.Add(df_prepared);
+         }
+       }
+     }
+
+     var preparedTree = ReplaceVarWithConst(treeForDerivation, thetaNames, thetaValues, allThetaNodes);
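Each interval constraint ℓ ≤ g ≤ u (where g is the model f itself or, for IsDerivation constraints, the partial derivative ∂f/∂x_k) is split into the two one-sided expressions that alglib expects:

\[ \ell \le g(\mathbf{x},\theta) \le u \quad\Longleftrightarrow\quad g(\mathbf{x},\theta) - u \le 0 \;\wedge\; \ell - g(\mathbf{x},\theta) \le 0 . \]

An infinite bound simply contributes no constraint tree, which is what the UpperBound < +∞ and LowerBound > −∞ guards implement.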
+
+     // local function
+     void UpdateThetaValues(double[] theta) {
+       for (int i = 0; i < theta.Length; ++i) {
+         foreach (var constNode in allThetaNodes[i]) constNode.Value = theta[i];
+       }
+     }
+
+     // buffers for calculate_jacobian
+     var target = problemData.TargetVariableTrainingValues.ToArray();
+     var fi_eval = new double[target.Length];
+     var jac_eval = new double[target.Length, thetaValues.Count];
+
+     // define the callback used by the alglib optimizer
+     // the x argument for this callback represents our theta
+     // local function
+     void calculate_jacobian(double[] x, double[] fi, double[,] jac, object obj) {
+       UpdateThetaValues(x);
+
+       var autoDiffEval = new VectorAutoDiffEvaluator();
+       autoDiffEval.Evaluate(preparedTree, problemData.Dataset, problemData.TrainingIndices.ToArray(),
+         GetParameterNodes(preparedTree, allThetaNodes), fi_eval, jac_eval);
+
+       // calc sum of squared errors and gradient
+       var sse = 0.0;
+       var g = new double[x.Length];
+       for (int i = 0; i < target.Length; i++) {
+         var res = target[i] - fi_eval[i];
+         sse += 0.5 * res * res;
+         for (int j = 0; j < g.Length; j++) {
+           g[j] -= res * jac_eval[i, j];
+         }
+       }
+
+       fi[0] = sse / target.Length;
+       for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j] / target.Length; }
+
+       var intervalEvaluator = new IntervalEvaluator();
+       for (int i = 0; i < constraintTrees.Count; i++) {
+         var interval = intervalEvaluator.Evaluate(constraintTrees[i], dataIntervals, GetParameterNodes(constraintTrees[i], allThetaNodes),
+           out double[] lowerGradient, out double[] upperGradient);
+
+         // we transformed this to a constraint c(x) <= 0, so only the upper bound is relevant for us
+         fi[i + 1] = interval.UpperBound;
+         for (int j = 0; j < x.Length; j++) {
+           jac[i + 1, j] = upperGradient[j];
+         }
+       }
+     }
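The callback packs the objective into fi[0] and jac[0,·], and one row per constraint tree into fi[i+1] and jac[i+1,·]. With N training rows, residuals r_i = y_i − ŷ_i(θ), and the autodiff Jacobian J_ij = ∂ŷ_i/∂θ_j, the buffer arithmetic above computes

\[ f(\theta) = \frac{1}{N}\sum_{i=1}^{N} \tfrac{1}{2} r_i^2, \qquad \frac{\partial f}{\partial \theta_j} = -\frac{1}{N}\sum_{i=1}^{N} r_i\, J_{ij}. \]

For the constraints, the interval evaluator propagates the variable ranges through each constraint tree; reporting the interval's upper bound is the conservative choice, since c(x, θ) ≤ 0 holds for every x in the ranges exactly when that upper bound is ≤ 0.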
+
+     alglib.minnlcstate state;
+     alglib.minnlcreport rep;
      try {
-       alglib.minlmcreatevj(n, c, out state);
-       alglib.minlmsetcond(state, 0.0, maxIterations);
-       alglib.minlmsetxrep(state, iterationCallback != null);
-       // alglib.minlmsetgradientcheck(state, 0.001);
-       alglib.minlmoptimize(state, function_cx_1_func, function_cx_1_jac, xrep, rowEvaluationsCounter);
-       alglib.minlmresults(state, out c, out rep);
+       alglib.minnlccreate(thetaValues.Count, thetaValues.ToArray(), out state);
+       alglib.minnlcsetalgoslp(state); // SLP is more robust but slower
+       alglib.minnlcsetcond(state, 0, maxIterations);
+       var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray(); // scale is set to unit scale
+       alglib.minnlcsetscale(state, s);
+
+       // set non-linear constraints: 0 equality constraints, constraintTrees.Count inequality constraints
+       alglib.minnlcsetnlc(state, 0, constraintTrees.Count);
+
+       alglib.minnlcoptimize(state, calculate_jacobian, null, null);
+       alglib.minnlcresults(state, out double[] xOpt, out rep);
+
+       // counter.FunctionEvaluations += rep.nfev; TODO
+       counter.GradientEvaluations += rep.nfev;
+
+       if (rep.terminationtype != -8) {
+         // update parameters in tree
+         var pIdx = 0;
+         foreach (var node in tree.IterateNodesPostfix().OfType<ConstantTreeNode>()) {
+           node.Value = xOpt[pIdx++];
+         }
+         // note: we keep the optimized constants even when the tree is worse.
+       }
      } catch (ArithmeticException) {
-       return originalQuality;
+       // eval MSE of original tree
+       return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
      } catch (alglib.alglibexception) {
-       return originalQuality;
-     }
-
-     counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
-     counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;
-
-     //retVal == -7 => constant optimization failed due to wrong gradient
-     if (rep.terminationtype != -7) {
-       if (applyLinearScaling) {
-         var tmp = new double[c.Length - 2];
-         Array.Copy(c, 2, tmp, 0, tmp.Length);
-         UpdateConstants(parameterNodes, tmp);
-       } else UpdateConstants(parameterNodes, c);
-     }
-     var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
-
-     if (!updateConstantsInTree) UpdateConstants(parameterNodes, initialConstants);
-
-     if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-       UpdateConstants(parameterNodes, initialConstants);
-       return originalQuality;
-     }
-     return quality;
-   }
+       // eval MSE of original tree
+       return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+     }
+
+     // evaluate tree with updated constants
+     return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+   }
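For reference, the minnlc calling pattern used in the try block, reduced to a self-contained toy problem. This is a sketch only; MinnlcDemo and its names are illustrative and not part of the changeset. It minimizes (x0 − 1)² subject to x0 − 0.5 ≤ 0, so the optimizer should stop at the constraint boundary x0 ≈ 0.5:

    using System;

    public static class MinnlcDemo {
      // fi[0]/jac[0,*] carry the objective; fi[1]/jac[1,*] carry the single inequality constraint c(x) <= 0.
      static void Jac(double[] x, double[] fi, double[,] jac, object obj) {
        fi[0] = (x[0] - 1) * (x[0] - 1);  // objective (x0 - 1)^2
        jac[0, 0] = 2 * (x[0] - 1);       // d objective / d x0
        fi[1] = x[0] - 0.5;               // constraint x0 - 0.5 <= 0
        jac[1, 0] = 1.0;                  // d constraint / d x0
      }

      public static void Main() {
        alglib.minnlccreate(1, new double[] { 0.0 }, out alglib.minnlcstate state);
        alglib.minnlcsetalgoslp(state);                     // same SLP algorithm as in the evaluator
        alglib.minnlcsetcond(state, 0, 100);                // stop after at most 100 iterations
        alglib.minnlcsetscale(state, new double[] { 1.0 }); // unit scale
        alglib.minnlcsetnlc(state, 0, 1);                   // 0 equality, 1 inequality constraint
        alglib.minnlcoptimize(state, Jac, null, null);
        alglib.minnlcresults(state, out double[] xOpt, out alglib.minnlcreport rep);
        Console.WriteLine($"x* = {xOpt[0]:F3}, termination = {rep.terminationtype}"); // expect x* near 0.5
      }
    }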
+
+   #region helper
+   private static ISymbolicExpressionTreeNode[] GetParameterNodes(ISymbolicExpressionTree tree, List<ConstantTreeNode>[] allNodes) {
+     // TODO better solution necessary
+     var treeConstNodes = tree.IterateNodesPostfix().OfType<ConstantTreeNode>().ToArray();
+     var paramNodes = new ISymbolicExpressionTreeNode[allNodes.Length];
+     for (int i = 0; i < paramNodes.Length; i++) {
+       paramNodes[i] = allNodes[i].SingleOrDefault(n => treeConstNodes.Contains(n));
+     }
+     return paramNodes;
+   }
+
+   private static ISymbolicExpressionTree ReplaceVarWithConst(ISymbolicExpressionTree tree, List<string> thetaNames, List<double> thetaValues, List<ConstantTreeNode>[] thetaNodes) {
+     var copy = (ISymbolicExpressionTree)tree.Clone();
+     var nodes = copy.IterateNodesPostfix().ToList();
+     for (int i = 0; i < nodes.Count; i++) {
+       var n = nodes[i] as VariableTreeNode;
+       if (n != null) {
+         var thetaIdx = thetaNames.IndexOf(n.VariableName);
+         if (thetaIdx >= 0) {
+           var parent = n.Parent;
+           if (thetaNodes[thetaIdx].Any()) {
+             // HACK: REUSE CONSTANT TREE NODE IN SEVERAL TREES
+             // we use this trick to allow autodiff over thetas when thetas occurr multiple times in the tree (e.g. in derived trees)
+             var constNode = thetaNodes[thetaIdx].First();
+             var childIdx = parent.IndexOfSubtree(n);
+             parent.RemoveSubtree(childIdx);
+             parent.InsertSubtree(childIdx, constNode);
+           } else {
+             var constNode = (ConstantTreeNode)CreateConstant(thetaValues[thetaIdx]);
+             var childIdx = parent.IndexOfSubtree(n);
+             parent.RemoveSubtree(childIdx);
+             parent.InsertSubtree(childIdx, constNode);
+             thetaNodes[thetaIdx].Add(constNode);
+           }
+         }
+       }
+     }
+     return copy;
+   }
+
+   private static ISymbolicExpressionTree ReplaceConstWithVar(ISymbolicExpressionTree tree, out List<string> thetaNames, out List<double> thetaValues) {
+     thetaNames = new List<string>();
+     thetaValues = new List<double>();
+     var copy = (ISymbolicExpressionTree)tree.Clone();
+     var nodes = copy.IterateNodesPostfix().ToList();
+
+     int n = 1;
+     for (int i = 0; i < nodes.Count; ++i) {
+       var node = nodes[i];
+       if (node is ConstantTreeNode constantTreeNode) {
+         var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
+         thetaVar.Weight = 1;
+         thetaVar.VariableName = $"θ{n++}";
+
+         thetaNames.Add(thetaVar.VariableName);
+         thetaValues.Add(constantTreeNode.Value);
+
+         var parent = constantTreeNode.Parent;
+         if (parent != null) {
+           var index = constantTreeNode.Parent.IndexOfSubtree(constantTreeNode);
+           parent.RemoveSubtree(index);
+           parent.InsertSubtree(index, thetaVar);
+         }
+       }
+     }
+     return copy;
+   }
+
+   private static ISymbolicExpressionTreeNode CreateConstant(double value) {
+     var constantNode = (ConstantTreeNode)new Constant().CreateTreeNode();
+     constantNode.Value = value;
+     return constantNode;
+   }
+
+   private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTree t, ISymbolicExpressionTreeNode b) {
+     var sub = MakeNode<Subtraction>(t.Root.GetSubtree(0).GetSubtree(0), b);
+     t.Root.GetSubtree(0).RemoveSubtree(0);
+     t.Root.GetSubtree(0).InsertSubtree(0, sub);
+     return t;
+   }
+   private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTreeNode b, ISymbolicExpressionTree t) {
+     var sub = MakeNode<Subtraction>(b, t.Root.GetSubtree(0).GetSubtree(0));
+     t.Root.GetSubtree(0).RemoveSubtree(0);
+     t.Root.GetSubtree(0).InsertSubtree(0, sub);
+     return t;
+   }
+
+   private static ISymbolicExpressionTreeNode MakeNode<T>(params ISymbolicExpressionTreeNode[] fs) where T : ISymbol, new() {
+     var node = new T().CreateTreeNode();
+     foreach (var f in fs) node.AddSubtree(f);
+     return node;
+   }
+   #endregion

    private static void UpdateConstants(ISymbolicExpressionTreeNode[] nodes, double[] constants) {
      if (nodes.Length != constants.Length) throw new InvalidOperationException();
-     for (int i = 0;i<nodes.Length;i++) {
+     for (int i = 0; i < nodes.Length; i++) {
        if (nodes[i] is VariableTreeNode varNode) varNode.Weight = constants[i];
        else if (nodes[i] is ConstantTreeNode constNode) constNode.Value = constants[i];
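A note on the helper pair above: ReplaceConstWithVar turns every ConstantTreeNode of the original tree into a variable named θ1, θ2, …, and ReplaceVarWithConst later substitutes those variables with ConstantTreeNode instances that are deliberately shared between the prepared objective tree and all derived constraint trees (collected per parameter in allThetaNodes[i]). Because every tree aliases the same nodes, a single UpdateThetaValues(x) call updates all occurrences of each θ before evaluation, and the autodiff and interval evaluators see consistent parameter values without per-tree bookkeeping.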
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Extensions.csproj
Diff r16912 → r16914:

…
      <AppDesignerFolder>Properties</AppDesignerFolder>
      <RootNamespace>HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Extensions</RootNamespace>
-     <AssemblyName>HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Extensions</AssemblyName>
+     <AssemblyName>HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Extensions-0.1</AssemblyName>
      <TargetFrameworkVersion>v4.6.1</TargetFrameworkVersion>
      <FileAlignment>512</FileAlignment>
…
      <DebugType>full</DebugType>
      <Optimize>false</Optimize>
-     <OutputPath>..\..\bin\</OutputPath>
+     <OutputPath>..\bin\</OutputPath>
      <DefineConstants>DEBUG;TRACE</DefineConstants>
      <ErrorReport>prompt</ErrorReport>
…
      <DebugType>pdbonly</DebugType>
      <Optimize>true</Optimize>
-     <OutputPath>..\..\bin\</OutputPath>
+     <OutputPath>..\bin\</OutputPath>
      <DefineConstants>TRACE</DefineConstants>
      <ErrorReport>prompt</ErrorReport>
…
      <Reference Include="ALGLIB-3.15.0">
        <HintPath>..\bin\ALGLIB-3.15.0.dll</HintPath>
+       <Private>False</Private>
      </Reference>
      <Reference Include="HEAL.Attic">
        <HintPath>..\bin\HEAL.Attic.dll</HintPath>
+       <Private>False</Private>
      </Reference>
      <Reference Include="HeuristicLab.Problems.DataAnalysis-3.4">
        <HintPath>..\bin\HeuristicLab.Problems.DataAnalysis-3.4.dll</HintPath>
+       <Private>False</Private>
      </Reference>
      <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic-3.4">
        <HintPath>..\bin\HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.dll</HintPath>
+       <Private>False</Private>
      </Reference>
      <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4">
        <HintPath>..\bin\HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.dll</HintPath>
+       <Private>False</Private>
      </Reference>
      <Reference Include="System" />
…
    </ItemGroup>
    <ItemGroup>
+     <Compile Include="ConstrainedConstantOptimizationEvaluator.cs" />
+     <Compile Include="Plugin.cs" />
      <Compile Include="Properties\AssemblyInfo.cs" />
      <Compile Include="SymbolicRegressionNewConstantOptimizationEvaluator.cs" />
…
        <Project>{958b43bc-cc5c-4fa2-8628-2b3b01d890b6}</Project>
        <Name>HeuristicLab.Collections-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Common\3.3\HeuristicLab.Common-3.3.csproj">
        <Project>{a9ad58b9-3ef9-4cc1-97e5-8d909039ff5c}</Project>
        <Name>HeuristicLab.Common-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Core\3.3\HeuristicLab.Core-3.3.csproj">
        <Project>{c36bd924-a541-4a00-afa8-41701378ddc5}</Project>
        <Name>HeuristicLab.Core-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Data\3.3\HeuristicLab.Data-3.3.csproj">
        <Project>{bbab9df5-5ef3-4ba8-ade9-b36e82114937}</Project>
        <Name>HeuristicLab.Data-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Encodings.SymbolicExpressionTreeEncoding\3.4\HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4.csproj">
        <Project>{06d4a186-9319-48a0-bade-a2058d462eea}</Project>
        <Name>HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Operators\3.3\HeuristicLab.Operators-3.3.csproj">
        <Project>{23da7ff4-d5b8-41b6-aa96-f0561d24f3ee}</Project>
        <Name>HeuristicLab.Operators-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Optimization\3.3\HeuristicLab.Optimization-3.3.csproj">
        <Project>{14ab8d24-25bc-400c-a846-4627aa945192}</Project>
        <Name>HeuristicLab.Optimization-3.3</Name>
+       <Private>False</Private>
      </ProjectReference>
      <ProjectReference Include="..\HeuristicLab.Parameters\3.3\HeuristicLab.Parameters-3.3.csproj">
        <Project>{56f9106a-079f-4c61-92f6-86a84c2d84b7}</Project>
        <Name>HeuristicLab.Parameters-3.3</Name>
+       <Private>False</Private>
+     </ProjectReference>
+     <ProjectReference Include="..\HeuristicLab.PluginInfrastructure\3.3\HeuristicLab.PluginInfrastructure-3.3.csproj">
+       <Project>{94186a6a-5176-4402-ae83-886557b53cca}</Project>
+       <Name>HeuristicLab.PluginInfrastructure-3.3</Name>
      </ProjectReference>
    </ItemGroup>