using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using HeuristicLab.Common;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Extensions;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  internal class ConstrainedNLSInternal : IDisposable {
    private readonly int maxIterations;
    public int MaxIterations => maxIterations;

    private readonly string solver;
    public string Solver => solver;

    private readonly ISymbolicExpressionTree expr;
    public ISymbolicExpressionTree Expr => expr;

    private readonly IRegressionProblemData problemData;
    public IRegressionProblemData ProblemData => problemData;

    public event Action FunctionEvaluated;
    public event Action<int, double> ConstraintEvaluated;

    private double bestError = double.MaxValue;
    public double BestError => bestError;

    private double curError = double.MaxValue;
    public double CurError => curError;

    private double[] bestSolution;
    public double[] BestSolution => bestSolution;

    private ISymbolicExpressionTree bestTree;
    public ISymbolicExpressionTree BestTree => bestTree;

    private double[] bestConstraintValues;
    public double[] BestConstraintValues => bestConstraintValues;

    private bool disposed = false;

    // for debugging (must be in the same order as processed below)
    public IEnumerable<string> ConstraintDescriptions {
      get {
        foreach (var elem in problemData.IntervalConstraints.Constraints) {
          if (!elem.Enabled) continue;
          if (elem.Interval.UpperBound < double.PositiveInfinity) {
            yield return elem.Expression + " < " + elem.Interval.UpperBound;
          }
          if (elem.Interval.LowerBound > double.NegativeInfinity) {
            yield return "-" + elem.Expression + " < " + (-1) * elem.Interval.LowerBound;
          }
        }
      }
    }

    public bool CheckGradient { get; internal set; }

    // begin internal state
    private IntPtr nlopt;
    private SymbolicDataAnalysisExpressionTreeLinearInterpreter interpreter;
    private readonly NLOpt.nlopt_func calculateObjectiveDelegate; // must hold the delegate to prevent GC
    // private readonly NLOpt.nlopt_precond preconditionDelegate;
    private readonly IntPtr[] constraintDataPtr; // must hold the objects to prevent GC
    private readonly NLOpt.nlopt_func[] calculateConstraintDelegates; // must hold the delegates to prevent GC
    private readonly List<double> thetaValues;
    private readonly IDictionary<string, Interval> dataIntervals;
    private readonly int[] trainingRows;
    private readonly double[] target;
    private readonly ISymbolicExpressionTree preparedTree;
    private readonly ISymbolicExpressionTreeNode[] preparedTreeParameterNodes;
    private readonly List<ConstantTreeNode>[] allThetaNodes;
    public List<ISymbolicExpressionTree> constraintTrees; // TODO make local in ctor (public for debugging)
    private readonly double[] fi_eval;
    private readonly double[,] jac_eval;
    private readonly ISymbolicExpressionTree scaledTree;
    private readonly VectorAutoDiffEvaluator autoDiffEval;
    private readonly VectorEvaluator eval;
    private readonly bool invalidProblem = false;
    // end internal state

    // for data exchange to/from optimizer in native code
    [StructLayout(LayoutKind.Sequential)]
    private struct ConstraintData {
      public int Idx;
      public ISymbolicExpressionTree Tree;
      public ISymbolicExpressionTreeNode[] ParameterNodes;
    }
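    // The constructor below prepares everything required for the NLopt run:
    //  1. the tree is linearly scaled so that its output matches the target mean and standard deviation,
    //  2. all parameters (constants and tuned variable weights) are extracted and replaced
    //     by variables named theta,
    //  3. for each finite interval bound one constraint tree of the form c(theta) <= 0 is built
    //     (for derivation constraints the tree is derived first),
    //  4. the NLopt optimizer is created and the objective and constraint callbacks are registered.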
    internal ConstrainedNLSInternal(string solver, ISymbolicExpressionTree expr, int maxIterations, IRegressionProblemData problemData, double ftol_rel = 0, double ftol_abs = 0, double maxTime = 0) {
      this.solver = solver;
      this.expr = expr;
      this.maxIterations = maxIterations;
      this.problemData = problemData;
      this.interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
      this.autoDiffEval = new VectorAutoDiffEvaluator();
      this.eval = new VectorEvaluator();
      CheckGradient = false;

      var intervalConstraints = problemData.IntervalConstraints;
      dataIntervals = problemData.VariableRanges.GetIntervals();
      trainingRows = problemData.TrainingIndices.ToArray();

      // buffers
      target = problemData.TargetVariableTrainingValues.ToArray();
      var targetStDev = target.StandardDeviationPop();
      var targetVariance = targetStDev * targetStDev;
      var targetMean = target.Average();
      var pred = interpreter.GetSymbolicExpressionTreeValues(expr, problemData.Dataset, trainingRows).ToArray();
      bestError = targetVariance;

      if (pred.Any(pi => double.IsInfinity(pi) || double.IsNaN(pi))) {
        invalidProblem = true;
      }

      // all trees are linearly scaled (to improve GP performance)
      #region linear scaling
      var predStDev = pred.StandardDeviationPop();
      if (predStDev == 0) {
        invalidProblem = true;
      }
      var predMean = pred.Average();

      // scale and shift so that the model output matches the target mean and standard deviation
      var scalingFactor = targetStDev / predStDev;
      var offset = targetMean - predMean * scalingFactor;

      scaledTree = CopyAndScaleTree(expr, scalingFactor, offset);
      #endregion

      // convert constants to variables named theta...
      var treeForDerivation = ReplaceAndExtractParameters(scaledTree, out List<string> thetaNames, out thetaValues); // copies the tree

      // create trees for relevant derivatives
      Dictionary<string, ISymbolicExpressionTree> derivatives = new Dictionary<string, ISymbolicExpressionTree>();
      allThetaNodes = thetaNames.Select(_ => new List<ConstantTreeNode>()).ToArray();
      constraintTrees = new List<ISymbolicExpressionTree>();
      foreach (var constraint in intervalConstraints.Constraints) {
        if (!constraint.Enabled) continue;
        if (constraint.IsDerivation) {
          if (!problemData.AllowedInputVariables.Contains(constraint.Variable))
            throw new ArgumentException($"Invalid constraint: the variable {constraint.Variable} does not exist in the dataset.");
          var df = DerivativeCalculator.Derive(treeForDerivation, constraint.Variable);

          // NLOpt requires constraint expressions of the form c(x) <= 0
          // -> we make two expressions, one for the lower bound and one for the upper bound

          if (constraint.Interval.UpperBound < double.PositiveInfinity) {
            var df_smaller_upper = Subtract((ISymbolicExpressionTree)df.Clone(), CreateConstant(constraint.Interval.UpperBound));
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
          if (constraint.Interval.LowerBound > double.NegativeInfinity) {
            var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone());
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
        } else {
          if (constraint.Interval.UpperBound < double.PositiveInfinity) {
            var f_smaller_upper = Subtract((ISymbolicExpressionTree)treeForDerivation.Clone(), CreateConstant(constraint.Interval.UpperBound));
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
          if (constraint.Interval.LowerBound > double.NegativeInfinity) {
            var f_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)treeForDerivation.Clone());
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
        }
      }
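      // Example: a constraint "df/dx in [0, 10]" yields the two constraint trees
      //   c1(theta) = df/dx - 10   (upper bound, c1 <= 0)
      //   c2(theta) = 0 - df/dx    (lower bound, c2 <= 0)
      // so that both bounds fit NLopt's convention c(x) <= 0.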
      preparedTree = ReplaceVarWithConst(treeForDerivation, thetaNames, thetaValues, allThetaNodes);
      preparedTreeParameterNodes = GetParameterNodes(preparedTree, allThetaNodes);

      var dim = thetaValues.Count;
      fi_eval = new double[target.Length]; // init buffer
      jac_eval = new double[target.Length, dim]; // init buffer

      var minVal = Math.Min(-1000.0, thetaValues.Min());
      var maxVal = Math.Max(1000.0, thetaValues.Max());
      var lb = Enumerable.Repeat(minVal, thetaValues.Count).ToArray();
      var up = Enumerable.Repeat(maxVal, thetaValues.Count).ToArray();
      nlopt = NLOpt.nlopt_create(GetSolver(solver), (uint)dim);

      NLOpt.nlopt_set_lower_bounds(nlopt, lb);
      NLOpt.nlopt_set_upper_bounds(nlopt, up);
      calculateObjectiveDelegate = new NLOpt.nlopt_func(CalculateObjective); // keep a reference to the delegate (see below)
      NLOpt.nlopt_set_min_objective(nlopt, calculateObjectiveDelegate, IntPtr.Zero); // --> without preconditioning

      //preconditionDelegate = new NLOpt.nlopt_precond(PreconditionObjective);
      //NLOpt.nlopt_set_precond_min_objective(nlopt, calculateObjectiveDelegate, preconditionDelegate, IntPtr.Zero);

      constraintDataPtr = new IntPtr[constraintTrees.Count];
      calculateConstraintDelegates = new NLOpt.nlopt_func[constraintTrees.Count]; // make sure we keep a reference to the delegates (otherwise the GC frees the delegate objects, see https://stackoverflow.com/questions/7302045/callback-delegates-being-collected#7302258)
      for (int i = 0; i < constraintTrees.Count; i++) {
        var constraintData = new ConstraintData() { Idx = i, Tree = constraintTrees[i], ParameterNodes = GetParameterNodes(constraintTrees[i], allThetaNodes) };
        constraintDataPtr[i] = Marshal.AllocHGlobal(Marshal.SizeOf<ConstraintData>());
        Marshal.StructureToPtr(constraintData, constraintDataPtr[i], fDeleteOld: false);
        calculateConstraintDelegates[i] = new NLOpt.nlopt_func(CalculateConstraint);
        NLOpt.nlopt_add_inequality_constraint(nlopt, calculateConstraintDelegates[i], constraintDataPtr[i], 1e-8);
        // NLOpt.nlopt_add_precond_inequality_constraint(nlopt, calculateConstraintDelegates[i], preconditionDelegate, constraintDataPtr[i], 1e-8);
      }

      NLOpt.nlopt_set_ftol_rel(nlopt, ftol_rel);
      NLOpt.nlopt_set_ftol_abs(nlopt, ftol_abs);
      NLOpt.nlopt_set_maxtime(nlopt, maxTime);
      NLOpt.nlopt_set_maxeval(nlopt, maxIterations);
    }
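    // Hypothetical usage sketch (caller code is not part of this file; names are illustrative):
    //   using (var nls = new ConstrainedNLSInternal("MMA", tree, 100, problemData)) {
    //     nls.Optimize(OptimizationMode.UpdateParameters);
    //     var mse = nls.BestError;   // best MSE observed during the run
    //     var model = nls.BestTree;  // tree with the optimized parameters written back
    //   }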
    ~ConstrainedNLSInternal() {
      Dispose(false);
    }

    public enum OptimizationMode { ReadOnly, UpdateParameters, UpdateParametersAndKeepLinearScaling };

    internal void Optimize(OptimizationMode mode) {
      if (invalidProblem) return;
      var x = thetaValues.ToArray();  /* initial guess */
      double minf = double.MaxValue; /* minimum objective value upon return */
      var res = NLOpt.nlopt_optimize(nlopt, x, ref minf);

      if (res < 0 && res != NLOpt.nlopt_result.NLOPT_FORCED_STOP) {
        // throw new InvalidOperationException($"NLOpt failed {res} {NLOpt.nlopt_get_errmsg(nlopt)}");
        return;
      } else /*if ( minf <= bestError ) */ {
        bestSolution = x;
        bestError = minf;

        // calculate constraints of final solution
        double[] _ = new double[x.Length];
        bestConstraintValues = new double[calculateConstraintDelegates.Length];
        for (int i = 0; i < calculateConstraintDelegates.Length; i++) {
          bestConstraintValues[i] = calculateConstraintDelegates[i].Invoke((uint)x.Length, x, _, constraintDataPtr[i]);
        }

        // update parameters in tree
        UpdateParametersInTree(scaledTree, x);

        if (mode == OptimizationMode.UpdateParameters) {
          // update original expression (when called from evaluator we want to write back optimized parameters)
          expr.Root.GetSubtree(0).RemoveSubtree(0); // delete old tree
          expr.Root.GetSubtree(0).InsertSubtree(0,
            scaledTree.Root.GetSubtree(0).GetSubtree(0).GetSubtree(0).GetSubtree(0)); // insert the optimized sub-tree (without scaling nodes)
        } else if (mode == OptimizationMode.UpdateParametersAndKeepLinearScaling) {
          expr.Root.GetSubtree(0).RemoveSubtree(0); // delete old tree
          expr.Root.GetSubtree(0).InsertSubtree(0, scaledTree.Root.GetSubtree(0).GetSubtree(0)); // insert the optimized sub-tree (including scaling nodes)
        }
      }
      bestTree = expr;
    }

    double CalculateObjective(uint dim, double[] curX, double[] grad, IntPtr data) {
      UpdateThetaValues(curX);
      var sse = 0.0;
      if (grad != null) {
        autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
          preparedTreeParameterNodes, fi_eval, jac_eval);

        // calc sum of squared errors and gradient
        for (int j = 0; j < grad.Length; j++) grad[j] = 0;
        for (int i = 0; i < target.Length; i++) {
          var r = target[i] - fi_eval[i];
          sse += r * r;
          for (int j = 0; j < grad.Length; j++) {
            grad[j] -= 2 * r * jac_eval[i, j];
          }
        }
        // average
        for (int j = 0; j < grad.Length; j++) { grad[j] /= target.Length; }

        #region check gradient
        if (grad != null && CheckGradient) {
          for (int i = 0; i < dim; i++) {
            // make two additional evaluations
            var xForNumDiff = (double[])curX.Clone();
            double delta = Math.Abs(xForNumDiff[i] * 1e-5);
            xForNumDiff[i] += delta;
            UpdateThetaValues(xForNumDiff);
            var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
            var mseHigh = MSE(target, evalHigh);

            xForNumDiff[i] = curX[i] - delta;
            UpdateThetaValues(xForNumDiff);
            var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
            var mseLow = MSE(target, evalLow);

            var numericDiff = (mseHigh - mseLow) / (2 * delta);
            var autoDiff = grad[i];
            if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
              || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
              throw new InvalidProgramException();
          }
        }
        #endregion
      } else {
        var eval = new VectorEvaluator();
        var prediction = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);

        // calc sum of squared errors
        sse = 0.0;
        for (int i = 0; i < target.Length; i++) {
          var r = target[i] - prediction[i];
          sse += r * r;
        }
      }

      UpdateBestSolution(sse / target.Length, curX);
      RaiseFunctionEvaluated();

      if (double.IsNaN(sse)) {
        if (grad != null) Array.Clear(grad, 0, grad.Length);
        return double.MaxValue;
      }
      return sse / target.Length;
    }
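    // Gradient used in CalculateObjective: for the mean squared error
    //   MSE(theta) = 1/N * sum_i (y_i - f(x_i, theta))^2
    // the partial derivatives are
    //   dMSE/dtheta_j = -2/N * sum_i (y_i - f(x_i, theta)) * J[i, j]
    // where J[i, j] = df(x_i, theta)/dtheta_j is filled in by the autodiff evaluator.
    // With CheckGradient enabled each component is verified against the central difference
    //   (MSE(theta + delta*e_j) - MSE(theta - delta*e_j)) / (2*delta).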
    // TODO
    // private void PreconditionObjective(uint n, double[] x, double[] v, double[] vpre, IntPtr data) {
    //   UpdateThetaValues(x); // calc H(x)
    //
    //   autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
    //     preparedTreeParameterNodes, fi_eval, jac_eval);
    //   var k = jac_eval.GetLength(0);
    //   var h = new double[n, n];
    //
    //   // calc residuals and scale jac_eval
    //   var f = 2.0 / (k * k);
    //
    //   // approximate hessian H(x) = J(x)^T * J(x)
    //   alglib.rmatrixgemm((int)n, (int)n, k,
    //     f, jac_eval, 0, 0, 1,  // transposed
    //     jac_eval, 0, 0, 0,
    //     0.0, ref h, 0, 0,
    //     null
    //     );
    //
    //   // scale v
    //   alglib.rmatrixmv((int)n, (int)n, h, 0, 0, 0, v, 0, ref vpre, 0, alglib.serial);
    //
    //   alglib.spdmatrixcholesky(ref h, (int)n, true);
    //   var det = alglib.matdet.spdmatrixcholeskydet(h, (int)n, alglib.serial);
    // }

    private double MSE(double[] a, double[] b) {
      Trace.Assert(a.Length == b.Length);
      var sse = 0.0;
      for (int i = 0; i < a.Length; i++) sse += (a[i] - b[i]) * (a[i] - b[i]);
      return sse / a.Length;
    }

    private void UpdateBestSolution(double curF, double[] curX) {
      if (double.IsNaN(curF) || double.IsInfinity(curF)) return;
      else if (curF < bestError) {
        bestError = curF;
        bestSolution = (double[])curX.Clone();
      }
      curError = curF;
    }

    private void UpdateConstraintViolations(int constraintIdx, double value) {
      if (double.IsNaN(value) || double.IsInfinity(value)) return;
      RaiseConstraintEvaluated(constraintIdx, value);
      // else if (curF < bestError) {
      //   bestError = curF;
      //   bestSolution = (double[])curX.Clone();
      // }
    }

    double CalculateConstraint(uint dim, double[] curX, double[] grad, IntPtr data) {
      UpdateThetaValues(curX);
      var intervalEvaluator = new IntervalEvaluator();
      var refIntervalEvaluator = new IntervalInterpreter();

      var constraintData = Marshal.PtrToStructure<ConstraintData>(data);

      if (grad != null) Array.Clear(grad, 0, grad.Length);

      var interval = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
        out double[] lowerGradient, out double[] upperGradient);
      var refInterval = refIntervalEvaluator.GetSymbolicExpressionTreeInterval(constraintData.Tree, dataIntervals);
      if (Math.Abs(interval.LowerBound - refInterval.LowerBound) > Math.Abs(interval.LowerBound) * 1e-4)
        throw new InvalidProgramException($"Intervals don't match. {interval.LowerBound} <> {refInterval.LowerBound}");
      if (Math.Abs(interval.UpperBound - refInterval.UpperBound) > Math.Abs(interval.UpperBound) * 1e-4)
        throw new InvalidProgramException($"Intervals don't match. {interval.UpperBound} <> {refInterval.UpperBound}");

      // we transformed this to a constraint c(x) <= 0, so only the upper bound is relevant for us
      if (grad != null) for (int j = 0; j < grad.Length; j++) { grad[j] = upperGradient[j]; }

      #region check gradient
      if (grad != null && CheckGradient)
        for (int i = 0; i < dim; i++) {
          // make two additional evaluations
          var xForNumDiff = (double[])curX.Clone();
          double delta = Math.Abs(xForNumDiff[i] * 1e-5);
          xForNumDiff[i] += delta;
          UpdateThetaValues(xForNumDiff);
          var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
            out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);

          xForNumDiff[i] = curX[i] - delta;
          UpdateThetaValues(xForNumDiff);
          var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
            out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);

          var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
          var autoDiff = grad[i];
          if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
            || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
            throw new InvalidProgramException();
        }
      #endregion

      UpdateConstraintViolations(constraintData.Idx, interval.UpperBound);
      if (double.IsNaN(interval.UpperBound)) {
        if (grad != null) Array.Clear(grad, 0, grad.Length);
        return double.MaxValue;
      } else return interval.UpperBound;
    }

    void UpdateThetaValues(double[] theta) {
      for (int i = 0; i < theta.Length; ++i) {
        foreach (var constNode in allThetaNodes[i]) constNode.Value = theta[i];
      }
    }

    internal void RequestStop() {
      NLOpt.nlopt_set_force_stop(nlopt, 1); // the value must be != 0, otherwise the call has no effect (hopefully NLopt is thread-safe)
    }

    private void RaiseFunctionEvaluated() {
      FunctionEvaluated?.Invoke();
    }

    private void RaiseConstraintEvaluated(int idx, double value) {
      ConstraintEvaluated?.Invoke(idx, value);
    }
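    // CalculateConstraint evaluates c(theta) over the intervals of the input variables
    // instead of over the training rows: the constraint holds on the whole input box
    // iff the upper bound of the resulting interval is <= 0. For example (values
    // illustrative), with theta = 2 and x in [1, 3] the tree for theta * x - 10
    // evaluates to [2*1 - 10, 2*3 - 10] = [-8, -4], so the constraint is satisfied.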
    #region helper

    private static ISymbolicExpressionTree CopyAndScaleTree(ISymbolicExpressionTree tree, double scalingFactor, double offset) {
      var m = (ISymbolicExpressionTree)tree.Clone();

      var add = MakeNode<Addition>(MakeNode<Multiplication>(m.Root.GetSubtree(0).GetSubtree(0), CreateConstant(scalingFactor)), CreateConstant(offset));
      m.Root.GetSubtree(0).RemoveSubtree(0);
      m.Root.GetSubtree(0).AddSubtree(add);
      return m;
    }
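    // Tree shape produced by CopyAndScaleTree: root -> start -> add(mul(f, scalingFactor), offset).
    // Optimize relies on this fixed shape when stripping the scaling nodes again:
    // Root.GetSubtree(0).GetSubtree(0) is the addition, the next GetSubtree(0) the
    // multiplication, and the innermost GetSubtree(0) the original model f.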
    private NLOpt.nlopt_algorithm GetSolver(string solver) {
      // NOTE: longer algorithm names must be checked before their prefixes,
      // otherwise e.g. "NLOPT_LD_TNEWTON_RESTART" would already be matched by the "NLOPT_LD_TNEWTON" check
      if (solver.Contains("MMA")) return NLOpt.nlopt_algorithm.NLOPT_LD_MMA;
      if (solver.Contains("COBYLA")) return NLOpt.nlopt_algorithm.NLOPT_LN_COBYLA;
      if (solver.Contains("CCSAQ")) return NLOpt.nlopt_algorithm.NLOPT_LD_CCSAQ;
      if (solver.Contains("ISRES")) return NLOpt.nlopt_algorithm.NLOPT_GN_ISRES;
      if (solver.Contains("NLOPT_GN_ORIG_DIRECT_L")) return NLOpt.nlopt_algorithm.NLOPT_GN_ORIG_DIRECT_L;
      if (solver.Contains("NLOPT_GN_ORIG_DIRECT")) return NLOpt.nlopt_algorithm.NLOPT_GN_ORIG_DIRECT;
      if (solver.Contains("NLOPT_GN_DIRECT_L_RAND")) return NLOpt.nlopt_algorithm.NLOPT_GN_DIRECT_L_RAND;
      if (solver.Contains("NLOPT_GN_DIRECT_L")) return NLOpt.nlopt_algorithm.NLOPT_GN_DIRECT_L;
      if (solver.Contains("DIRECT_G")) return NLOpt.nlopt_algorithm.NLOPT_GN_DIRECT;
      if (solver.Contains("NLOPT_GD_STOGO_RAND")) return NLOpt.nlopt_algorithm.NLOPT_GD_STOGO_RAND;
      if (solver.Contains("NLOPT_GD_STOGO")) return NLOpt.nlopt_algorithm.NLOPT_GD_STOGO;
      if (solver.Contains("NLOPT_LD_LBFGS_NOCEDAL")) return NLOpt.nlopt_algorithm.NLOPT_LD_LBFGS_NOCEDAL;
      if (solver.Contains("NLOPT_LD_LBFGS")) return NLOpt.nlopt_algorithm.NLOPT_LD_LBFGS;
      if (solver.Contains("NLOPT_LN_PRAXIS")) return NLOpt.nlopt_algorithm.NLOPT_LN_PRAXIS;
      if (solver.Contains("NLOPT_LD_VAR1")) return NLOpt.nlopt_algorithm.NLOPT_LD_VAR1;
      if (solver.Contains("NLOPT_LD_VAR2")) return NLOpt.nlopt_algorithm.NLOPT_LD_VAR2;
      if (solver.Contains("NLOPT_LD_TNEWTON_PRECOND_RESTART")) return NLOpt.nlopt_algorithm.NLOPT_LD_TNEWTON_PRECOND_RESTART;
      if (solver.Contains("NLOPT_LD_TNEWTON_PRECOND")) return NLOpt.nlopt_algorithm.NLOPT_LD_TNEWTON_PRECOND;
      if (solver.Contains("NLOPT_LD_TNEWTON_RESTART")) return NLOpt.nlopt_algorithm.NLOPT_LD_TNEWTON_RESTART;
      if (solver.Contains("NLOPT_LD_TNEWTON")) return NLOpt.nlopt_algorithm.NLOPT_LD_TNEWTON;
      if (solver.Contains("NLOPT_GN_CRS2_LM")) return NLOpt.nlopt_algorithm.NLOPT_GN_CRS2_LM;
      if (solver.Contains("NLOPT_GN_MLSL_LDS")) return NLOpt.nlopt_algorithm.NLOPT_GN_MLSL_LDS;
      if (solver.Contains("NLOPT_GN_MLSL")) return NLOpt.nlopt_algorithm.NLOPT_GN_MLSL;
      if (solver.Contains("NLOPT_GD_MLSL_LDS")) return NLOpt.nlopt_algorithm.NLOPT_GD_MLSL_LDS;
      if (solver.Contains("NLOPT_GD_MLSL")) return NLOpt.nlopt_algorithm.NLOPT_GD_MLSL;
      if (solver.Contains("NLOPT_LN_NEWUOA_BOUND")) return NLOpt.nlopt_algorithm.NLOPT_LN_NEWUOA_BOUND;
      if (solver.Contains("NLOPT_LN_NEWUOA")) return NLOpt.nlopt_algorithm.NLOPT_LN_NEWUOA;
      if (solver.Contains("NLOPT_LN_NELDERMEAD")) return NLOpt.nlopt_algorithm.NLOPT_LN_NELDERMEAD;
      if (solver.Contains("NLOPT_LN_SBPLX")) return NLOpt.nlopt_algorithm.NLOPT_LN_SBPLX;
      if (solver.Contains("NLOPT_LN_AUGLAG")) return NLOpt.nlopt_algorithm.NLOPT_LN_AUGLAG;
      if (solver.Contains("NLOPT_LD_AUGLAG")) return NLOpt.nlopt_algorithm.NLOPT_LD_AUGLAG;
      if (solver.Contains("NLOPT_LN_BOBYQA")) return NLOpt.nlopt_algorithm.NLOPT_LN_BOBYQA;
      if (solver.Contains("NLOPT_AUGLAG")) return NLOpt.nlopt_algorithm.NLOPT_AUGLAG;
      if (solver.Contains("NLOPT_LD_SLSQP")) return NLOpt.nlopt_algorithm.NLOPT_LD_SLSQP;
      if (solver.Contains("NLOPT_GN_ESCH")) return NLOpt.nlopt_algorithm.NLOPT_GN_ESCH;
      if (solver.Contains("NLOPT_GN_AGS")) return NLOpt.nlopt_algorithm.NLOPT_GN_AGS;
      throw new ArgumentException($"Unknown solver {solver}");
    }

    // determines the nodes over which we can calculate the partial derivative
    // this is different from the vector of all parameters because not every tree contains all parameters
    private static ISymbolicExpressionTreeNode[] GetParameterNodes(ISymbolicExpressionTree tree, List<ConstantTreeNode>[] allNodes) {
      // TODO better solution necessary
      var treeConstNodes = tree.IterateNodesPostfix().OfType<ConstantTreeNode>().ToArray();
      var paramNodes = new ISymbolicExpressionTreeNode[allNodes.Length];
      for (int i = 0; i < paramNodes.Length; i++) {
        paramNodes[i] = allNodes[i].SingleOrDefault(n => treeConstNodes.Contains(n));
      }
      return paramNodes;
    }

    private static ISymbolicExpressionTree ReplaceVarWithConst(ISymbolicExpressionTree tree, List<string> thetaNames, List<double> thetaValues, List<ConstantTreeNode>[] thetaNodes) {
      var copy = (ISymbolicExpressionTree)tree.Clone();
      var nodes = copy.IterateNodesPostfix().ToList();
      for (int i = 0; i < nodes.Count; i++) {
        var n = nodes[i] as VariableTreeNode;
        if (n != null) {
          var thetaIdx = thetaNames.IndexOf(n.VariableName);
          if (thetaIdx >= 0) {
            var parent = n.Parent;
            if (thetaNodes[thetaIdx].Any()) {
              // HACK: reuse the constant tree node in several trees
              // this trick allows autodiff over thetas when thetas occur multiple times in the tree (e.g. in derived trees)
              var constNode = thetaNodes[thetaIdx].First();
              var childIdx = parent.IndexOfSubtree(n);
              parent.RemoveSubtree(childIdx);
              parent.InsertSubtree(childIdx, constNode);
            } else {
              var constNode = (ConstantTreeNode)CreateConstant(thetaValues[thetaIdx]);
              var childIdx = parent.IndexOfSubtree(n);
              parent.RemoveSubtree(childIdx);
              parent.InsertSubtree(childIdx, constNode);
              thetaNodes[thetaIdx].Add(constNode);
            }
          }
        }
      }
      return copy;
    }
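    // Because each theta is represented by a single shared ConstantTreeNode instance,
    // UpdateThetaValues writes every theta exactly once and the prepared tree and all
    // constraint trees see the new value. E.g. if theta_1 occurs in f and in df/dx,
    // both trees reference the same node object for theta_1.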
    private void UpdateParametersInTree(ISymbolicExpressionTree scaledTree, double[] x) {
      var pIdx = 0;
      // here we lose the two last parameters (for linear scaling)
      foreach (var node in scaledTree.IterateNodesPostfix()) {
        if (node is ConstantTreeNode constTreeNode) {
          constTreeNode.Value = x[pIdx++];
        } else if (node is VariableTreeNode varTreeNode) {
          if (varTreeNode.Weight != 1.0) // see ReplaceAndExtractParameters
            varTreeNode.Weight = x[pIdx++];
        }
      }
      if (pIdx != x.Length) throw new InvalidProgramException();
    }

    private static ISymbolicExpressionTree ReplaceAndExtractParameters(ISymbolicExpressionTree tree, out List<string> thetaNames, out List<double> thetaValues) {
      thetaNames = new List<string>();
      thetaValues = new List<double>();
      var copy = (ISymbolicExpressionTree)tree.Clone();
      var nodes = copy.IterateNodesPostfix().ToList();

      int n = 1;
      for (int i = 0; i < nodes.Count; ++i) {
        var node = nodes[i];
        if (node is ConstantTreeNode constantTreeNode) {
          var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
          thetaVar.Weight = 1;
          thetaVar.VariableName = $"θ{n++}";

          thetaNames.Add(thetaVar.VariableName);
          thetaValues.Add(constantTreeNode.Value);

          var parent = constantTreeNode.Parent;
          if (parent != null) {
            var index = constantTreeNode.Parent.IndexOfSubtree(constantTreeNode);
            parent.RemoveSubtree(index);
            parent.InsertSubtree(index, thetaVar);
          }
        }
        if (node is VariableTreeNode varTreeNode) {
          // NOTE: we assume that variable weights are not tuned when they are originally exactly 1,
          // because in that case the tree has been parsed and explicitly has the structure w * var
          if (varTreeNode.Weight == 1) continue;
          var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
          thetaVar.Weight = 1;
          thetaVar.VariableName = $"θ{n++}";

          thetaNames.Add(thetaVar.VariableName);
          thetaValues.Add(varTreeNode.Weight);

          var parent = varTreeNode.Parent;
          if (parent != null) {
            var index = varTreeNode.Parent.IndexOfSubtree(varTreeNode);
            parent.RemoveSubtree(index);
            var prodNode = MakeNode<Multiplication>();
            varTreeNode.Weight = 1.0;
            prodNode.AddSubtree(varTreeNode);
            prodNode.AddSubtree(thetaVar);
            parent.InsertSubtree(index, prodNode);
          }
        }
      }
      return copy;
    }

    private static ISymbolicExpressionTreeNode CreateConstant(double value) {
      var constantNode = (ConstantTreeNode)new Constant().CreateTreeNode();
      constantNode.Value = value;
      return constantNode;
    }

    private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTree t, ISymbolicExpressionTreeNode b) {
      var sub = MakeNode<Subtraction>(t.Root.GetSubtree(0).GetSubtree(0), b);
      t.Root.GetSubtree(0).RemoveSubtree(0);
      t.Root.GetSubtree(0).InsertSubtree(0, sub);
      return t;
    }

    private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTreeNode b, ISymbolicExpressionTree t) {
      var sub = MakeNode<Subtraction>(b, t.Root.GetSubtree(0).GetSubtree(0));
      t.Root.GetSubtree(0).RemoveSubtree(0);
      t.Root.GetSubtree(0).InsertSubtree(0, sub);
      return t;
    }

    private static ISymbolicExpressionTreeNode MakeNode<T>(params ISymbolicExpressionTreeNode[] fs) where T : ISymbol, new() {
      var node = new T().CreateTreeNode();
      foreach (var f in fs) node.AddSubtree(f);
      return node;
    }

    public void Dispose() {
      Dispose(true);
      GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing) {
      if (disposed) return;

      if (disposing) {
        // Free any other managed objects here.
      }

      // Free any unmanaged objects here.
      if (nlopt != IntPtr.Zero) {
        NLOpt.nlopt_destroy(nlopt);
        nlopt = IntPtr.Zero;
      }
      if (constraintDataPtr != null) {
        for (int i = 0; i < constraintDataPtr.Length; i++)
          if (constraintDataPtr[i] != IntPtr.Zero) {
            Marshal.FreeHGlobal(constraintDataPtr[i]);
            constraintDataPtr[i] = IntPtr.Zero;
          }
      }

      disposed = true;
    }
    #endregion
  }
}