#region License Information /* HeuristicLab * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL) * * This file is part of HeuristicLab. * * HeuristicLab is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * HeuristicLab is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with HeuristicLab. If not, see . */ #endregion using System; using System.Collections.Generic; using System.Linq; using HeuristicLab.Common; using HeuristicLab.Core; using HeuristicLab.Data; using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding; using HeuristicLab.Optimization; using HeuristicLab.Parameters; using HEAL.Attic; namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression { [Item("Constant Optimization Evaluator (with constraints)", "")] [StorableType("A8958E06-C54A-4193-862E-8315C86EB5C1")] public class ConstrainedConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator { private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations"; private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement"; private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability"; private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage"; private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree"; private const string UpdateVariableWeightsParameterName = "Update Variable Weights"; private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations"; private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations"; private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations"; public IFixedValueParameter ConstantOptimizationIterationsParameter { get { return (IFixedValueParameter)Parameters[ConstantOptimizationIterationsParameterName]; } } public IFixedValueParameter ConstantOptimizationImprovementParameter { get { return (IFixedValueParameter)Parameters[ConstantOptimizationImprovementParameterName]; } } public IFixedValueParameter ConstantOptimizationProbabilityParameter { get { return (IFixedValueParameter)Parameters[ConstantOptimizationProbabilityParameterName]; } } public IFixedValueParameter ConstantOptimizationRowsPercentageParameter { get { return (IFixedValueParameter)Parameters[ConstantOptimizationRowsPercentageParameterName]; } } public IFixedValueParameter UpdateConstantsInTreeParameter { get { return (IFixedValueParameter)Parameters[UpdateConstantsInTreeParameterName]; } } public IFixedValueParameter UpdateVariableWeightsParameter { get { return (IFixedValueParameter)Parameters[UpdateVariableWeightsParameterName]; } } public IResultParameter FunctionEvaluationsResultParameter { get { return (IResultParameter)Parameters[FunctionEvaluationsResultParameterName]; } } public IResultParameter GradientEvaluationsResultParameter { get { return (IResultParameter)Parameters[GradientEvaluationsResultParameterName]; } } public IFixedValueParameter CountEvaluationsParameter { get { return (IFixedValueParameter)Parameters[CountEvaluationsParameterName]; } } public IConstrainedValueParameter SolverParameter { get { return (IConstrainedValueParameter)Parameters["Solver"]; } } public IntValue ConstantOptimizationIterations { get { return ConstantOptimizationIterationsParameter.Value; } } public DoubleValue ConstantOptimizationImprovement { get { return ConstantOptimizationImprovementParameter.Value; } } public PercentValue ConstantOptimizationProbability { get { return ConstantOptimizationProbabilityParameter.Value; } } public PercentValue ConstantOptimizationRowsPercentage { get { return ConstantOptimizationRowsPercentageParameter.Value; } } public bool UpdateConstantsInTree { get { return UpdateConstantsInTreeParameter.Value.Value; } set { UpdateConstantsInTreeParameter.Value.Value = value; } } public bool UpdateVariableWeights { get { return UpdateVariableWeightsParameter.Value.Value; } set { UpdateVariableWeightsParameter.Value.Value = value; } } public bool CountEvaluations { get { return CountEvaluationsParameter.Value.Value; } set { CountEvaluationsParameter.Value.Value = value; } } public string Solver { get { return SolverParameter.Value.Value; } } public override bool Maximization { get { return false; } } [StorableConstructor] protected ConstrainedConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { } protected ConstrainedConstantOptimizationEvaluator(ConstrainedConstantOptimizationEvaluator original, Cloner cloner) : base(original, cloner) { } public ConstrainedConstantOptimizationEvaluator() : base() { Parameters.Add(new FixedValueParameter(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10))); Parameters.Add(new FixedValueParameter(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true }); Parameters.Add(new FixedValueParameter(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1))); Parameters.Add(new FixedValueParameter(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1))); Parameters.Add(new FixedValueParameter(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true }); Parameters.Add(new FixedValueParameter(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true }); Parameters.Add(new FixedValueParameter(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false))); var validSolvers = new ItemSet(new[] { "non-smooth (minns)", "sequential linear programming (minnlc)" }.Select(s => new StringValue(s).AsReadOnly())); Parameters.Add(new ConstrainedValueParameter("Solver", "The solver algorithm", validSolvers, validSolvers.First())); Parameters.Add(new ResultParameter(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue())); Parameters.Add(new ResultParameter(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue())); } public override IDeepCloneable Clone(Cloner cloner) { return new ConstrainedConstantOptimizationEvaluator(this, cloner); } [StorableHook(HookType.AfterDeserialization)] private void AfterDeserialization() { } private static readonly object locker = new object(); public override IOperation InstrumentedApply() { var solution = SymbolicExpressionTreeParameter.ActualValue; double quality; if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) { IEnumerable constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value); var counter = new EvaluationsCounter(); quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue, constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, Solver, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter); if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) { var evaluationRows = GenerateRowsToEvaluate(); quality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, double.MinValue, double.MaxValue, ProblemDataParameter.ActualValue, evaluationRows, applyLinearScaling: false); } if (CountEvaluations) { lock (locker) { FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations; GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations; } } } else { var evaluationRows = GenerateRowsToEvaluate(); quality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, double.MinValue, double.MaxValue, ProblemDataParameter.ActualValue, evaluationRows, applyLinearScaling: false); } QualityParameter.ActualValue = new DoubleValue(quality); return base.InstrumentedApply(); } public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable rows) { SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context; EstimationLimitsParameter.ExecutionContext = context; ApplyLinearScalingParameter.ExecutionContext = context; FunctionEvaluationsResultParameter.ExecutionContext = context; GradientEvaluationsResultParameter.ExecutionContext = context; // MSE evaluator is used on purpose instead of the const-opt evaluator, // because Evaluate() is used to get the quality of evolved models on // different partitions of the dataset (e.g., best validation model) double mse = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, double.MinValue, double.MaxValue, problemData, rows, applyLinearScaling: false); SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null; EstimationLimitsParameter.ExecutionContext = null; ApplyLinearScalingParameter.ExecutionContext = null; FunctionEvaluationsResultParameter.ExecutionContext = null; GradientEvaluationsResultParameter.ExecutionContext = null; return mse; } public class EvaluationsCounter { public int FunctionEvaluations = 0; public int GradientEvaluations = 0; } private static void GetParameterNodes(ISymbolicExpressionTree tree, out List thetaNodes, out List thetaValues) { thetaNodes = new List(); thetaValues = new List(); var nodes = tree.IterateNodesPrefix().ToArray(); for (int i = 0; i < nodes.Length; ++i) { var node = nodes[i]; if (node is VariableTreeNode variableTreeNode) { thetaValues.Add(variableTreeNode.Weight); thetaNodes.Add(node); } else if (node is ConstantTreeNode constantTreeNode) { thetaNodes.Add(node); thetaValues.Add(constantTreeNode.Value); } } } public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable rows, bool applyLinearScaling, string solver, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true, Action iterationCallback = null, EvaluationsCounter counter = null) { if (!updateVariableWeights) throw new NotSupportedException("not updating variable weights is not supported"); if (!updateConstantsInTree) throw new NotSupportedException("not updating tree parameters is not supported"); if (applyLinearScaling) throw new NotSupportedException("linear scaling is not supported"); // we always update constants, so we don't need to calculate initial quality // double originalQuality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); if (counter == null) counter = new EvaluationsCounter(); var rowEvaluationsCounter = new EvaluationsCounter(); var intervalConstraints = problemData.IntervalConstraints; var dataIntervals = problemData.VariableRanges.GetIntervals(); // convert constants to variables named theta... var treeForDerivation = ReplaceConstWithVar(tree, out List thetaNames, out List thetaValues); // copies the tree // create trees for relevant derivatives Dictionary derivatives = new Dictionary(); var allThetaNodes = thetaNames.Select(_ => new List()).ToArray(); var constraintTrees = new List(); foreach (var constraint in intervalConstraints.Constraints) { if (constraint.IsDerivation) { if (!problemData.AllowedInputVariables.Contains(constraint.Variable)) throw new ArgumentException($"Invalid constraint: the variable {constraint.Variable} does not exist in the dataset."); var df = DerivativeCalculator.Derive(treeForDerivation, constraint.Variable); // alglib requires constraint expressions of the form c(x) <= 0 // -> we make two expressions, one for the lower bound and one for the upper bound if (constraint.Interval.UpperBound < double.PositiveInfinity) { var df_smaller_upper = Subtract((ISymbolicExpressionTree)df.Clone(), CreateConstant(constraint.Interval.UpperBound)); // convert variables named theta back to constants var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes); constraintTrees.Add(df_prepared); } if (constraint.Interval.LowerBound > double.NegativeInfinity) { var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone()); // convert variables named theta back to constants var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes); constraintTrees.Add(df_prepared); } } else { if (constraint.Interval.UpperBound < double.PositiveInfinity) { var f_smaller_upper = Subtract((ISymbolicExpressionTree)treeForDerivation.Clone(), CreateConstant(constraint.Interval.UpperBound)); // convert variables named theta back to constants var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes); constraintTrees.Add(df_prepared); } if (constraint.Interval.LowerBound > double.NegativeInfinity) { var f_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)treeForDerivation.Clone()); // convert variables named theta back to constants var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes); constraintTrees.Add(df_prepared); } } } var preparedTree = ReplaceVarWithConst(treeForDerivation, thetaNames, thetaValues, allThetaNodes); // local function void UpdateThetaValues(double[] theta) { for (int i = 0; i < theta.Length; ++i) { foreach (var constNode in allThetaNodes[i]) constNode.Value = theta[i]; } } // buffers for calculate_jacobian var target = problemData.TargetVariableTrainingValues.ToArray(); var fi_eval = new double[target.Length]; var jac_eval = new double[target.Length, thetaValues.Count]; // define the callback used by the alglib optimizer // the x argument for this callback represents our theta // local function void calculate_jacobian(double[] x, double[] fi, double[,] jac, object obj) { UpdateThetaValues(x); var autoDiffEval = new VectorAutoDiffEvaluator(); autoDiffEval.Evaluate(preparedTree, problemData.Dataset, problemData.TrainingIndices.ToArray(), GetParameterNodes(preparedTree, allThetaNodes), fi_eval, jac_eval); // calc sum of squared errors and gradient var sse = 0.0; var g = new double[x.Length]; for (int i = 0; i < target.Length; i++) { var res = target[i] - fi_eval[i]; sse += 0.5 * res * res; for (int j = 0; j < g.Length; j++) { g[j] -= res * jac_eval[i, j]; } } fi[0] = sse / target.Length; for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j] / target.Length; } var intervalEvaluator = new IntervalEvaluator(); for (int i = 0; i < constraintTrees.Count; i++) { var interval = intervalEvaluator.Evaluate(constraintTrees[i], dataIntervals, GetParameterNodes(constraintTrees[i], allThetaNodes), out double[] lowerGradient, out double[] upperGradient); // we transformed this to a constraint c(x) <= 0, so only the upper bound is relevant for us fi[i + 1] = interval.UpperBound; for (int j = 0; j < x.Length; j++) { jac[i + 1, j] = upperGradient[j]; } } } if (solver.Contains("minns")) { alglib.minnsstate state; alglib.minnsreport rep; try { alglib.minnscreate(thetaValues.Count, thetaValues.ToArray(), out state); // alglib.minnssetalgoslp(state); // SLP is more robust but slower alglib.minnssetbc(state, thetaValues.Select(_ => -10000.0).ToArray(), thetaValues.Select(_ => +10000.0).ToArray()); alglib.minnssetcond(state, 1E-7, maxIterations); var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray(); // scale is set to unit scale alglib.minnssetscale(state, s); // set non-linear constraints: 0 equality constraints, constraintTrees inequality constraints alglib.minnssetnlc(state, 0, constraintTrees.Count); alglib.minnsoptimize(state, calculate_jacobian, null, null); alglib.minnsresults(state, out double[] xOpt, out rep); // counter.FunctionEvaluations += rep.nfev; TODO counter.GradientEvaluations += rep.nfev; if (rep.terminationtype != -8) { // update parameters in tree var pIdx = 0; foreach (var node in tree.IterateNodesPostfix()) { if (node is ConstantTreeNode constTreeNode) { constTreeNode.Value = xOpt[pIdx++]; } else if (node is VariableTreeNode varTreeNode) { varTreeNode.Weight = xOpt[pIdx++]; } } // note: we keep the optimized constants even when the tree is worse. } } catch (ArithmeticException) { // eval MSE of original tree return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); } catch (alglib.alglibexception) { // eval MSE of original tree return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); } } else if (solver.Contains("minnlc")) { alglib.minnlcstate state; alglib.minnlcreport rep; alglib.optguardreport optGuardRep; try { alglib.minnlccreate(thetaValues.Count, thetaValues.ToArray(), out state); alglib.minnlcsetalgoslp(state); // SLP is more robust but slower alglib.minnlcsetbc(state, thetaValues.Select(_ => -10000.0).ToArray(), thetaValues.Select(_ => +10000.0).ToArray()); alglib.minnlcsetcond(state, 1E-7, maxIterations); var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray(); // scale is set to unit scale alglib.minnlcsetscale(state, s); // set non-linear constraints: 0 equality constraints, constraintTrees inequality constraints alglib.minnlcsetnlc(state, 0, constraintTrees.Count); alglib.minnlcoptguardsmoothness(state, 1); alglib.minnlcoptimize(state, calculate_jacobian, null, null); alglib.minnlcresults(state, out double[] xOpt, out rep); alglib.minnlcoptguardresults(state, out optGuardRep); if (optGuardRep.nonc0suspected) throw new InvalidProgramException("optGuardRep.nonc0suspected"); if (optGuardRep.nonc1suspected) throw new InvalidProgramException("optGuardRep.nonc1suspected"); // counter.FunctionEvaluations += rep.nfev; TODO counter.GradientEvaluations += rep.nfev; if (rep.terminationtype != -8) { // update parameters in tree var pIdx = 0; foreach (var node in tree.IterateNodesPostfix()) { if (node is ConstantTreeNode constTreeNode) { constTreeNode.Value = xOpt[pIdx++]; } else if (node is VariableTreeNode varTreeNode) { varTreeNode.Weight = xOpt[pIdx++]; } } // note: we keep the optimized constants even when the tree is worse. } } catch (ArithmeticException) { // eval MSE of original tree return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); } catch (alglib.alglibexception) { // eval MSE of original tree return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); } } else { throw new ArgumentException($"Unknown solver {solver}"); } // evaluate tree with updated constants return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false); } #region helper private static ISymbolicExpressionTreeNode[] GetParameterNodes(ISymbolicExpressionTree tree, List[] allNodes) { // TODO better solution necessary var treeConstNodes = tree.IterateNodesPostfix().OfType().ToArray(); var paramNodes = new ISymbolicExpressionTreeNode[allNodes.Length]; for (int i = 0; i < paramNodes.Length; i++) { paramNodes[i] = allNodes[i].SingleOrDefault(n => treeConstNodes.Contains(n)); } return paramNodes; } private static ISymbolicExpressionTree ReplaceVarWithConst(ISymbolicExpressionTree tree, List thetaNames, List thetaValues, List[] thetaNodes) { var copy = (ISymbolicExpressionTree)tree.Clone(); var nodes = copy.IterateNodesPostfix().ToList(); for (int i = 0; i < nodes.Count; i++) { var n = nodes[i] as VariableTreeNode; if (n != null) { var thetaIdx = thetaNames.IndexOf(n.VariableName); if (thetaIdx >= 0) { var parent = n.Parent; if (thetaNodes[thetaIdx].Any()) { // HACK: REUSE CONSTANT TREE NODE IN SEVERAL TREES // we use this trick to allow autodiff over thetas when thetas occurr multiple times in the tree (e.g. in derived trees) var constNode = thetaNodes[thetaIdx].First(); var childIdx = parent.IndexOfSubtree(n); parent.RemoveSubtree(childIdx); parent.InsertSubtree(childIdx, constNode); } else { var constNode = (ConstantTreeNode)CreateConstant(thetaValues[thetaIdx]); var childIdx = parent.IndexOfSubtree(n); parent.RemoveSubtree(childIdx); parent.InsertSubtree(childIdx, constNode); thetaNodes[thetaIdx].Add(constNode); } } } } return copy; } private static ISymbolicExpressionTree ReplaceConstWithVar(ISymbolicExpressionTree tree, out List thetaNames, out List thetaValues) { thetaNames = new List(); thetaValues = new List(); var copy = (ISymbolicExpressionTree)tree.Clone(); var nodes = copy.IterateNodesPostfix().ToList(); int n = 1; for (int i = 0; i < nodes.Count; ++i) { var node = nodes[i]; if (node is ConstantTreeNode constantTreeNode) { var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode(); thetaVar.Weight = 1; thetaVar.VariableName = $"θ{n++}"; thetaNames.Add(thetaVar.VariableName); thetaValues.Add(constantTreeNode.Value); var parent = constantTreeNode.Parent; if (parent != null) { var index = constantTreeNode.Parent.IndexOfSubtree(constantTreeNode); parent.RemoveSubtree(index); parent.InsertSubtree(index, thetaVar); } } if (node is VariableTreeNode varTreeNode) { var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode(); thetaVar.Weight = 1; thetaVar.VariableName = $"θ{n++}"; thetaNames.Add(thetaVar.VariableName); thetaValues.Add(varTreeNode.Weight); var parent = varTreeNode.Parent; if (parent != null) { var index = varTreeNode.Parent.IndexOfSubtree(varTreeNode); parent.RemoveSubtree(index); var prodNode = MakeNode(); varTreeNode.Weight = 1.0; prodNode.AddSubtree(varTreeNode); prodNode.AddSubtree(thetaVar); parent.InsertSubtree(index, prodNode); } } } return copy; } private static ISymbolicExpressionTreeNode CreateConstant(double value) { var constantNode = (ConstantTreeNode)new Constant().CreateTreeNode(); constantNode.Value = value; return constantNode; } private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTree t, ISymbolicExpressionTreeNode b) { var sub = MakeNode(t.Root.GetSubtree(0).GetSubtree(0), b); t.Root.GetSubtree(0).RemoveSubtree(0); t.Root.GetSubtree(0).InsertSubtree(0, sub); return t; } private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTreeNode b, ISymbolicExpressionTree t) { var sub = MakeNode(b, t.Root.GetSubtree(0).GetSubtree(0)); t.Root.GetSubtree(0).RemoveSubtree(0); t.Root.GetSubtree(0).InsertSubtree(0, sub); return t; } private static ISymbolicExpressionTreeNode MakeNode(params ISymbolicExpressionTreeNode[] fs) where T : ISymbol, new() { var node = new T().CreateTreeNode(); foreach (var f in fs) node.AddSubtree(f); return node; } #endregion private static void UpdateConstants(ISymbolicExpressionTreeNode[] nodes, double[] constants) { if (nodes.Length != constants.Length) throw new InvalidOperationException(); for (int i = 0; i < nodes.Length; i++) { if (nodes[i] is VariableTreeNode varNode) varNode.Weight = constants[i]; else if (nodes[i] is ConstantTreeNode constNode) constNode.Value = constants[i]; } } private static alglib.ndimensional_fvec CreateFunc(ISymbolicExpressionTree tree, VectorEvaluator eval, ISymbolicExpressionTreeNode[] parameterNodes, IDataset ds, string targetVar, int[] rows) { var y = ds.GetDoubleValues(targetVar, rows).ToArray(); return (double[] c, double[] fi, object o) => { UpdateConstants(parameterNodes, c); var pred = eval.Evaluate(tree, ds, rows); for (int i = 0; i < fi.Length; i++) fi[i] = pred[i] - y[i]; var counter = (EvaluationsCounter)o; counter.FunctionEvaluations++; }; } private static alglib.ndimensional_jac CreateJac(ISymbolicExpressionTree tree, VectorAutoDiffEvaluator eval, ISymbolicExpressionTreeNode[] parameterNodes, IDataset ds, string targetVar, int[] rows) { var y = ds.GetDoubleValues(targetVar, rows).ToArray(); return (double[] c, double[] fi, double[,] jac, object o) => { UpdateConstants(parameterNodes, c); eval.Evaluate(tree, ds, rows, parameterNodes, fi, jac); for (int i = 0; i < fi.Length; i++) fi[i] -= y[i]; var counter = (EvaluationsCounter)o; counter.GradientEvaluations++; }; } public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) { return TreeToAutoDiffTermConverter.IsCompatible(tree); } } }