Changeset 12820
- Timestamp: 07/30/15 18:42:18 (9 years ago)
- Location: branches/crossvalidation-2434
- Files: 21 edited
Legend:
- Unmodified (shown indented, no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
branches/crossvalidation-2434
- Property svn:mergeinfo changed: /trunk/sources (added) merged: 12787,12790-12795,12797-12798,12801,12810-12812,12816-12817,12819
branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Algorithms.DataAnalysis (added) merged: 12790,12792,12797,12817,12819
branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessBase.cs
r12012 → r12820

    Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed

+   // necessary for BFGS
+   Parameters.Add(new ValueParameter<BoolValue>("Maximization", new BoolValue(false)));
+   Parameters["Maximization"].Hidden = true;
+
    var randomCreator = new HeuristicLab.Random.RandomCreator();
    var gpInitializer = new GaussianProcessHyperparameterInitializer();
…
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
+     // BackwardsCompatibility3.4
+     #region Backwards compatible code, remove with 3.5
+     if (!Parameters.ContainsKey("Maximization")) {
+       Parameters.Add(new ValueParameter<BoolValue>("Maximization", new BoolValue(false)));
+       Parameters["Maximization"].Hidden = true;
+     }
+     #endregion
    }
  }
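This is the first of several diffs in the changeset that use the HeuristicLab backwards-compatibility idiom: the new parameter is added in the constructor, and the [StorableHook(HookType.AfterDeserialization)] hook re-adds it when an item persisted by an older version is loaded. A minimal stand-alone sketch of the idiom, with a plain dictionary standing in for the ParameterCollection (all names are illustrative, not the actual API):

    using System.Collections.Generic;

    // Sketch: add a new parameter for fresh instances and patch it in for old,
    // persisted instances, so that previously stored items keep working.
    class BackwardsCompatibleParameterSketch {
      private readonly Dictionary<string, object> Parameters = new Dictionary<string, object>();

      public BackwardsCompatibleParameterSketch() {
        Parameters.Add("Maximization", false);   // new instances always get the parameter
      }

      // called after an old, persisted instance has been restored
      private void AfterDeserialization() {
        // old instances lack the parameter; default to the old behaviour (minimization)
        if (!Parameters.ContainsKey("Maximization"))
          Parameters.Add("Maximization", false);
      }
    }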
branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r12509 → r12820

    private double[] covarianceParameter;

-   [Storable]
-   private double[,] l;
-
-   [Storable]
-   private double[,] x;
+   private double[,] l; // used to be storable in previous versions (is calculated lazily now)
+   private double[,] x; // scaled training dataset, used to be storable in previous versions (is calculated lazily now)
+
+   // BackwardsCompatibility3.4
+   #region Backwards compatible code, remove with 3.5
+   [Storable(Name = "l")] // restore if available but don't store anymore
+   private double[,] l_storable {
+     set { this.l = value; }
+     get {
+       if (trainingDataset == null) return l; // this model has been created with an old version
+       else return null; // if the training dataset is available l should not be serialized
+     }
+   }
+   [Storable(Name = "x")] // restore if available but don't store anymore
+   private double[,] x_storable {
+     set { this.x = value; }
+     get {
+       if (trainingDataset == null) return x; // this model has been created with an old version
+       else return null; // if the training dataset is available x should not be serialized
+     }
+   }
+   #endregion
+
+   [Storable]
+   private IDataset trainingDataset; // it is better to store the original training dataset completely because this is more efficient in persistence
+   [Storable]
+   private int[] trainingRows;
+
    [Storable]
    private Scaling inputScaling;
…
    this.covarianceFunction = cloner.Clone(original.covarianceFunction);
    this.inputScaling = cloner.Clone(original.inputScaling);
+   this.trainingDataset = cloner.Clone(original.trainingDataset);
    this.negativeLogLikelihood = original.negativeLogLikelihood;
    this.targetVariable = original.targetVariable;
…
    // shallow copies of arrays because they cannot be modified
+   this.trainingRows = original.trainingRows;
    this.allowedInputVariables = original.allowedInputVariables;
    this.alpha = original.alpha;
…
      .ToArray();
    sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
-
    CalculateModel(ds, rows);
  }

  private void CalculateModel(IDataset ds, IEnumerable<int> rows) {
-   inputScaling = new Scaling(ds, allowedInputVariables, rows);
-   x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
+   this.trainingDataset = (IDataset)ds.Clone();
+   this.trainingRows = rows.ToArray();
+   this.inputScaling = new Scaling(trainingDataset, allowedInputVariables, rows);
+   this.x = CalculateX(trainingDataset, allowedInputVariables, rows, inputScaling);
    var y = ds.GetDoubleValues(targetVariable, rows);

    int n = x.GetLength(0);
-   l = new double[n, n];
-
-   // calculate means and covariances
+
+   // calculate cholesky decomposed (lower triangular) covariance matrix
+   var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
+   this.l = CalculateL(x, cov, sqrSigmaNoise);
+
+   // calculate mean
    var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, x.GetLength(1)));
    double[] m = Enumerable.Range(0, x.GetLength(0))
…
      .ToArray();

-   var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
-   for (int i = 0; i < n; i++) {
-     for (int j = i; j < n; j++) {
-       l[j, i] = cov.Covariance(x, i, j) / sqrSigmaNoise;
-       if (j == i) l[j, i] += 1.0;
-     }
-   }
-
-   // cholesky decomposition
+   // calculate sum of diagonal elements for likelihood
+   double diagSum = Enumerable.Range(0, n).Select(i => Math.Log(l[i, i])).Sum();
+
+   // solve for alpha
+   double[] ym = y.Zip(m, (a, b) => a - b).ToArray();
+
    int info;
    alglib.densesolverreport denseSolveRep;
-
-   var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
-   if (!res) throw new ArgumentException("Matrix is not positive semidefinite");
-
-   // calculate sum of diagonal elements for likelihood
-   double diagSum = Enumerable.Range(0, n).Select(i => Math.Log(l[i, i])).Sum();
-
-   // solve for alpha
-   double[] ym = y.Zip(m, (a, b) => a - b).ToArray();

    alglib.spdmatrixcholeskysolve(l, n, false, ym, out info, out denseSolveRep, out alpha);
…
  }

+ private static double[,] CalculateX(IDataset ds, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows, Scaling inputScaling) {
+   return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
+ }
+
+ private static double[,] CalculateL(double[,] x, ParameterizedCovarianceFunction cov, double sqrSigmaNoise) {
+   int n = x.GetLength(0);
+   var l = new double[n, n];
+
+   // calculate covariances
+   for (int i = 0; i < n; i++) {
+     for (int j = i; j < n; j++) {
+       l[j, i] = cov.Covariance(x, i, j) / sqrSigmaNoise;
+       if (j == i) l[j, i] += 1.0;
+     }
+   }
+
+   // cholesky decomposition
+   var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
+   if (!res) throw new ArgumentException("Matrix is not positive semidefinite");
+   return l;
+ }
+
  public override IDeepCloneable Clone(Cloner cloner) {
…
  private IEnumerable<double> GetEstimatedValuesHelper(IDataset dataset, IEnumerable<int> rows) {
+   if (x == null) {
+     this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
+   }
+   int n = x.GetLength(0);
+
    var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    int newN = newX.GetLength(0);
-   int n = x.GetLength(0);
+
    var Ks = new double[newN, n];
    var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1)));
…
  public IEnumerable<double> GetEstimatedVariance(IDataset dataset, IEnumerable<int> rows) {
+   if (x == null) {
+     this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
+   }
+   int n = x.GetLength(0);
+
    var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    int newN = newX.GetLength(0);
-   int n = x.GetLength(0);

    var kss = new double[newN];
    double[,] sWKs = new double[n, newN];
    var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
+
+   if (l == null) {
+     l = CalculateL(x, cov, sqrSigmaNoise);
+   }

    // for stddev
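Summing up the GaussianProcessModel change: the scaled input matrix x and the Cholesky factor l are no longer persisted; instead the training dataset and row indices are stored (which the diff comment argues is cheaper to persist), and x and l are rebuilt lazily through the new CalculateX/CalculateL helpers the first time a prediction needs them. A reduced sketch of that lazy-rebuild pattern (class and method names here are illustrative placeholders, not the actual HeuristicLab types):

    // Sketch of the lazy recomputation pattern: only the training data is persisted,
    // the scaled inputs (x) and the Cholesky factor (l) are rebuilt on first use
    // after deserialization.
    class LazyGpModelSketch {
      private readonly double[,] trainingData;  // persisted
      private double[,] x;                      // transient, rebuilt on demand
      private double[,] l;                      // transient, rebuilt on demand

      public LazyGpModelSketch(double[,] trainingData) { this.trainingData = trainingData; }

      private double[,] X {
        get {
          if (x == null) x = ScaleInputs(trainingData);   // stand-in for CalculateX(...)
          return x;
        }
      }

      private double[,] L {
        get {
          if (l == null) l = FactorCovariance(X);         // stand-in for CalculateL(...)
          return l;
        }
      }

      private static double[,] ScaleInputs(double[,] data) {
        return (double[,])data.Clone();                   // placeholder for input scaling
      }

      private static double[,] FactorCovariance(double[,] inputs) {
        int n = inputs.GetLength(0);
        return new double[n, n];                          // placeholder for chol(K/sigma^2 + I)
      }
    }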
branches/crossvalidation-2434/HeuristicLab.Algorithms.GradientDescent/3.3/Lbfgs.cs
r12504 → r12820

    RegisterEvents();
    solutionCreator.OperatorParameter.ActualName = Problem.SolutionCreatorParameter.Name;
+   solutionCreator.OperatorParameter.Hidden = true;
    evaluator.OperatorParameter.ActualName = Problem.EvaluatorParameter.Name;
+   evaluator.OperatorParameter.Hidden = true;
    UpdateAnalyzers();
    ParameterizeOperators();
…
  protected override void Problem_OperatorsChanged(object sender, EventArgs e) {
    base.Problem_OperatorsChanged(sender, e);
+   RegisterEvents();
+   solutionCreator.OperatorParameter.ActualName = Problem.SolutionCreatorParameter.Name;
+   solutionCreator.OperatorParameter.Hidden = true;
+   evaluator.OperatorParameter.ActualName = Problem.EvaluatorParameter.Name;
+   evaluator.OperatorParameter.Hidden = true;
    UpdateAnalyzers();
+   ParameterizeOperators();

…
    var realVectorParameterName = realVectorCreator.RealVectorParameter.ActualName;
    initializer.PointParameter.ActualName = realVectorParameterName;
+   initializer.PointParameter.Hidden = true;
    makeStep.PointParameter.ActualName = realVectorParameterName;
+   makeStep.PointParameter.Hidden = true;
    analyzer.PointParameter.ActualName = realVectorParameterName;
+   analyzer.PointParameter.Hidden = true;
  }

  var qualityParameterName = Problem.Evaluator.QualityParameter.ActualName;
  updateResults.QualityParameter.ActualName = qualityParameterName;
+ updateResults.QualityParameter.Hidden = true;
  analyzer.QualityParameter.ActualName = qualityParameterName;
+ analyzer.QualityParameter.Hidden = true;
  }
}
branches/crossvalidation-2434/HeuristicLab.Algorithms.GradientDescent/3.3/LbfgsUpdateResults.cs
r12012 → r12820

    private const string StateParameterName = "State";
    private const string ApproximateGradientsParameterName = "ApproximateGradients";
+   private const string MaximizationParameterName = "Maximization";

    #region Parameter Properties
…
      get { return (ILookupParameter<LbfgsState>)Parameters[StateParameterName]; }
    }
+   public ILookupParameter<BoolValue> MaximizationParameter {
+     get { return (ILookupParameter<BoolValue>)Parameters[MaximizationParameterName]; }
+   }
    #endregion
…
    private DoubleValue Quality { get { return QualityParameter.ActualValue; } }
    private LbfgsState State { get { return StateParameter.ActualValue; } }
+
+   private BoolValue Maximization {
+     get {
+       // BackwardsCompatibility3.3
+       #region Backwards compatible code, remove with 3.4
+       // the parameter is new, previously we assumed minimization problems
+       if (MaximizationParameter.ActualValue == null) return new BoolValue(false);
+       #endregion
+       return MaximizationParameter.ActualValue;
+     }
+   }
+
    #endregion
…
    Parameters.Add(new LookupParameter<BoolValue>(ApproximateGradientsParameterName,
      "Flag that indicates if gradients should be approximated."));
+   Parameters.Add(new LookupParameter<BoolValue>(MaximizationParameterName, "Flag that indicates if we solve a maximization problem."));
    // in & out
    Parameters.Add(new LookupParameter<LbfgsState>(StateParameterName, "The state of the LM-BFGS algorithm."));
+ }
+
+ [StorableHook(HookType.AfterDeserialization)]
+ private void AfterDeserialization() {
+   // BackwardsCompatibility3.3
+
+   #region Backwards compatible code, remove with 3.4
+   if (!Parameters.ContainsKey(MaximizationParameterName)) {
+     // previous behaviour defaulted to minimization
+     Parameters.Add(new LookupParameter<BoolValue>(MaximizationParameterName, "Flag that indicates if we solve a maximization problem."));
+   }
+   #endregion
  }
…
  public override IOperation Apply() {
    var state = State;
-   var f = Quality.Value;
+   var sign = Maximization.Value ? -1.0 : 1.0;
+   var f = sign * Quality.Value;
    state.State.f = f;
    if (!ApproximateGradients.Value) {
-     var g = QualityGradients.ToArray();
+     var g = QualityGradients.Select(gi => sign * gi).ToArray();
      state.State.g = g;
    }
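The new Maximization flag is handled by the standard reduction of maximization to minimization: L-BFGS itself always minimizes, so for maximization problems LbfgsUpdateResults negates both the objective value and the gradient before writing them into the optimizer state. A minimal, HeuristicLab-independent illustration of that reduction:

    using System;
    using System.Linq;

    // A minimizer can maximize f by minimizing -f, provided the gradient is negated too.
    static class MaximizationAsMinimization {
      public static double WrapObjective(double quality, bool maximization) {
        var sign = maximization ? -1.0 : 1.0;
        return sign * quality;                              // value handed to the minimizer
      }

      public static double[] WrapGradient(double[] gradient, bool maximization) {
        var sign = maximization ? -1.0 : 1.0;
        return gradient.Select(gi => sign * gi).ToArray();  // gradient handed to the minimizer
      }
    }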
branches/crossvalidation-2434/HeuristicLab.Optimization.Views
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Optimization.Views (added) merged: 12787
branches/crossvalidation-2434/HeuristicLab.Optimization.Views/3.3/RunCollectionViews/RunCollectionBoxPlotView.cs
r12077 → r12820

    switch (axisDimension) {
      case AxisDimension.Color: {
-       value = GetCategoricalValue(-1, run.Color.ToString());
+       const int colorDimension = -1;
+       if (!categoricalMapping.ContainsKey(colorDimension)) {
+         categoricalMapping[colorDimension] = Content.Where(r => r.Visible)
+           .Select(r => r.Color.Name)
+           .Distinct()
+           .OrderBy(c => c, new NaturalStringComparer())
+           .Select((c, i) => new { Color = c, Index = i })
+           .ToDictionary(a => (object)a.Color, a => (double)a.Index);
+       }
+       value = GetCategoricalValue(colorDimension, run.Color.Name);
        break;
      }
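The box plot fix builds the categorical mapping for the color axis explicitly: the distinct color names of all visible runs are sorted with a natural string comparison and assigned consecutive indices, and lookups then use run.Color.Name. A stripped-down version of that mapping step (an ordinal comparer stands in for HeuristicLab's NaturalStringComparer, and the method name is illustrative):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class CategoricalColorMapping {
      // Assigns each distinct category name a numeric position on the axis.
      public static Dictionary<string, double> Build(IEnumerable<string> visibleRunColorNames) {
        return visibleRunColorNames
          .Distinct()
          .OrderBy(name => name, StringComparer.Ordinal)   // stand-in for NaturalStringComparer
          .Select((name, index) => new { name, index })
          .ToDictionary(a => a.name, a => (double)a.index);
      }
    }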
branches/crossvalidation-2434/HeuristicLab.Optimizer
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Optimizer (added) merged: 12812
branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.DataAnalysis (added) merged: 12790,12792,12816-12817
branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationEnsembleSolution.cs
r12509 → r12820

  }

+ [Storable]
  private readonly ItemCollection<IClassificationSolution> classificationSolutions;
  public IItemCollection<IClassificationSolution> ClassificationSolutions {
…
  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() {
-   foreach (var model in Model.Models) {
-     IClassificationProblemData problemData = (IClassificationProblemData)ProblemData.Clone();
-     problemData.TrainingPartition.Start = trainingPartitions[model].Start;
-     problemData.TrainingPartition.End = trainingPartitions[model].End;
-     problemData.TestPartition.Start = testPartitions[model].Start;
-     problemData.TestPartition.End = testPartitions[model].End;
-
-     classificationSolutions.Add(model.CreateClassificationSolution(problemData));
+   if (!classificationSolutions.Any()) {
+     foreach (var model in Model.Models) {
+       IClassificationProblemData problemData = (IClassificationProblemData)ProblemData.Clone();
+       problemData.TrainingPartition.Start = trainingPartitions[model].Start;
+       problemData.TrainingPartition.End = trainingPartitions[model].End;
+       problemData.TestPartition.Start = testPartitions[model].Start;
+       problemData.TestPartition.End = testPartitions[model].End;
+
+       classificationSolutions.Add(model.CreateClassificationSolution(problemData));
+     }
    }
    RegisterClassificationSolutionsEventHandler();
branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionEnsembleSolution.cs
r12509 → r12820

  }

+ [Storable]
  private readonly ItemCollection<IRegressionSolution> regressionSolutions;
  public IItemCollection<IRegressionSolution> RegressionSolutions {
…
  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() {
-   foreach (var model in Model.Models) {
-     IRegressionProblemData problemData = (IRegressionProblemData)ProblemData.Clone();
-     problemData.TrainingPartition.Start = trainingPartitions[model].Start;
-     problemData.TrainingPartition.End = trainingPartitions[model].End;
-     problemData.TestPartition.Start = testPartitions[model].Start;
-     problemData.TestPartition.End = testPartitions[model].End;
-
-     regressionSolutions.Add(model.CreateRegressionSolution(problemData));
+   if (!regressionSolutions.Any()) {
+     foreach (var model in Model.Models) {
+       IRegressionProblemData problemData = (IRegressionProblemData)ProblemData.Clone();
+       problemData.TrainingPartition.Start = trainingPartitions[model].Start;
+       problemData.TrainingPartition.End = trainingPartitions[model].End;
+       problemData.TestPartition.Start = testPartitions[model].Start;
+       problemData.TestPartition.End = testPartitions[model].End;
+
+       regressionSolutions.Add(model.CreateRegressionSolution(problemData));
+     }
    }
    RegisterRegressionSolutionsEventHandler();
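Both ensemble solutions now persist their member solution collections directly ([Storable]); the AfterDeserialization hook therefore only rebuilds the collection when it comes back empty, i.e. when a file written by an older version is loaded. Without the guard, every member would be added a second time. A minimal sketch of this guarded-rebuild pattern (the collection type and rebuild logic are placeholders, not the HeuristicLab API):

    using System.Collections.Generic;
    using System.Linq;

    // Guarded rebuild: the member list is persisted now, so the deserialization hook
    // must be a no-op for new files and only reconstruct members for old files.
    class EnsembleSketch {
      private readonly List<string> memberSolutions = new List<string>();  // persisted

      private void AfterDeserialization() {
        if (!memberSolutions.Any()) {                             // old file: collection was not stored
          foreach (var model in new[] { "model1", "model2" })     // stand-in for Model.Models
            memberSolutions.Add(model + ".solution");             // stand-in for CreateXSolution(problemData)
        }
        // new file: members were restored directly, nothing to do
      }
    }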
branches/crossvalidation-2434/HeuristicLab.Problems.NK/3.3
- Property svn:ignore set to:
  bin
  obj
  Plugin.cs
branches/crossvalidation-2434/HeuristicLab.Problems.QuadraticAssignment
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.QuadraticAssignment (added) merged: 12811
branches/crossvalidation-2434/HeuristicLab.Problems.QuadraticAssignment.Algorithms/3.3/RobustTabooSeachOperator.cs
r12012 → r12820

    private ILookupParameter<BoolValue> AllMovesTabuParameter {
      get { return (ILookupParameter<BoolValue>)Parameters["AllMovesTabu"]; }
+   }
+
+   public ILookupParameter<IntValue> EvaluatedMovesParameter {
+     get { return (ILookupParameter<IntValue>)Parameters["EvaluatedMoves"]; }
    }
    #endregion
…
    Parameters.Add(new ValueLookupParameter<IntValue>("AlternativeAspirationTenure", "The time t that a move will be remembered for the alternative aspiration condition."));
    Parameters.Add(new LookupParameter<BoolValue>("AllMovesTabu", "Indicates that all moves are tabu."));
+   Parameters.Add(new LookupParameter<IntValue>("EvaluatedMoves", "The number of move evaluations made."));
…
    if (!Parameters.ContainsKey("AllMovesTabu")) {
      Parameters.Add(new LookupParameter<BoolValue>("AllMovesTabu", "Indicates that all moves are tabu."));
+   }
+   if (!Parameters.ContainsKey("EvaluatedMoves")) {
+     Parameters.Add(new LookupParameter<IntValue>("EvaluatedMoves", "The number of move evaluations made."));
    }
    #endregion
…
    bool already_aspired = false;

+   var evaluatedMoves = 0;
    foreach (Swap2Move move in ExhaustiveSwap2MoveGenerator.Generate(solution)) {
      double moveQuality;
-     if (lastMove == null)
+     if (lastMove == null) {
        moveQuality = QAPSwap2MoveEvaluator.Apply(solution, move, weights, distances);
-     else if (allMovesTabu) moveQuality = moveQualityMatrix[move.Index1, move.Index2];
-     else moveQuality = QAPSwap2MoveEvaluator.Apply(solution, move, moveQualityMatrix[move.Index1, move.Index2], weights, distances, lastMove);
+       evaluatedMoves++;
+     } else if (allMovesTabu) moveQuality = moveQualityMatrix[move.Index1, move.Index2];
+     else {
+       moveQuality = QAPSwap2MoveEvaluator.Apply(solution, move, moveQualityMatrix[move.Index1, move.Index2], weights, distances, lastMove);
+       evaluatedMoves++;
+     }

      moveQualityMatrix[move.Index1, move.Index2] = moveQuality;
…
      }
    }
+
+   EvaluatedMovesParameter.ActualValue.Value += evaluatedMoves;

    allMovesTabu = bestMove == null;
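The counting rule used here is worth spelling out: a move contributes to EvaluatedMoves only when its quality is actually computed, either from scratch (no previous move is available yet) or as a delta update relative to the cached move quality matrix; a pure lookup under the all-moves-tabu condition costs nothing. A compact, self-contained restatement of that rule (names are illustrative):

    using System;

    static class MoveEvaluationCounting {
      // Returns the move quality and counts only the cases where an evaluation is performed.
      public static double Evaluate(bool noPreviousMove, bool allMovesTabu, double cachedQuality,
                                    Func<double> fullEvaluation, Func<double> deltaEvaluation,
                                    ref int evaluatedMoves) {
        if (noPreviousMove) {
          evaluatedMoves++;
          return fullEvaluation();               // full evaluation of the swap move
        }
        if (allMovesTabu) return cachedQuality;  // table lookup only, not counted
        evaluatedMoves++;
        return deltaEvaluation();                // incremental update relative to the last move
      }
    }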
branches/crossvalidation-2434/HeuristicLab.Problems.QuadraticAssignment.Algorithms/3.3/RobustTabooSearch.cs
r12504 → r12820

    VariableCreator variableCreator = new VariableCreator();
    variableCreator.CollectedValues.Add(new ValueParameter<IntValue>("Iterations", new IntValue(0)));
+   variableCreator.CollectedValues.Add(new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
+   variableCreator.CollectedValues.Add(new ValueParameter<IntValue>("EvaluatedMoves", new IntValue(0)));

    ResultsCollector resultsCollector = new ResultsCollector();
    resultsCollector.CollectedValues.Add(new LookupParameter<IntValue>("Iterations", "The actual iteration."));
+   resultsCollector.CollectedValues.Add(new LookupParameter<IntValue>("EvaluatedSolutions", "The number of full solution evaluations."));
+   resultsCollector.CollectedValues.Add(new LookupParameter<IntValue>("EvaluatedMoves", "The number of move evaluations."));

    solutionsCreator = new SolutionsCreator();
    solutionsCreator.NumberOfSolutions = new IntValue(1);
+
+   IntCounter counter = new IntCounter();
+   counter.ValueParameter.ActualName = "EvaluatedSolutions";
+   counter.Increment = new IntValue(1);

    Placeholder analyzer = new Placeholder();
…
    mainOperator.ShortTermMemoryParameter.ActualName = "ShortTermMemory";
    mainOperator.UseAlternativeAspirationParameter.ActualName = UseAlternativeAspirationParameter.Name;
+   mainOperator.EvaluatedMovesParameter.ActualName = "EvaluatedMoves";

    ConditionalBranch qualityStopBranch = new ConditionalBranch();
…
    variableCreator.Successor = resultsCollector;
    resultsCollector.Successor = solutionsCreator;
-   solutionsCreator.Successor = analyzer;
+   solutionsCreator.Successor = counter;
+   counter.Successor = analyzer;
    analyzer.Successor = ussp;
    ussp.Operator = mainOperator;
branches/crossvalidation-2434/HeuristicLab.Problems.QuadraticAssignment/3.3/LocalImprovement/QAPExhaustiveInversionLocalImprovement.cs
r12012 → r12820

      }
    }
-   evaluatedSolutions.Value = (int)Math.Ceiling(evaluations);
+   evaluatedSolutions.Value += (int)Math.Ceiling(evaluations);
    if (bestMove == null) break;
    InversionManipulator.Apply(assignment, bestMove.Index1, bestMove.Index2);
branches/crossvalidation-2434/HeuristicLab.Problems.QuadraticAssignment/3.3/LocalImprovement/QAPStochasticScrambleLocalImprovement.cs
r12012 → r12820

      }
    }
-   evaluatedSolutions.Value = (int)Math.Ceiling(evaluations);
+   evaluatedSolutions.Value += (int)Math.Ceiling(evaluations);
    if (bestMove == null) break;
    ScrambleManipulator.Apply(assignment, bestMove.StartIndex, bestMove.ScrambledIndices);
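In both local improvement operators the change from "=" to "+=" matters because the operator can be applied several times within one algorithm run: each application must add its own evaluation effort to the shared counter instead of overwriting it. A tiny sketch of that accumulation (types and names are illustrative):

    using System;

    class EvaluatedSolutionsCounter {
      public int Value { get; set; }
    }

    class LocalImprovementSketch {
      public void Improve(EvaluatedSolutionsCounter evaluatedSolutions) {
        double evaluations = 0.0;
        // ... neighborhood search that adds fractional evaluation effort ...
        evaluations += 2.5;                                            // illustrative effort of this call
        evaluatedSolutions.Value += (int)Math.Ceiling(evaluations);    // accumulate, don't overwrite
      }
    }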
branches/crossvalidation-2434/HeuristicLab.Tests
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Tests (added) merged: 12791,12798,12801,12812,12817
branches/crossvalidation-2434/HeuristicLab.Tests/HeuristicLab-3.3/Samples/VnsOpSampleTest.cs
r12722 → r12820

    opProblem.Name = "1_p64_t070";
-   opProblem.Description = "Represents a symmetric Traveling Salesman Problem.";
+   opProblem.Description = "Represents an instance of an orienteering problem.";
    #endregion
    #region Algorithm Configuration
-   vns.Name = "Variable Neighborhood Search - TSP";
-   vns.Description = "A variable neighborhood search algorithm which solves a funny TSP instance";
+   vns.Name = "Variable Neighborhood Search - OP";
+   vns.Description = "A variable neighborhood search algorithm which solves an orienteering problem instance";
    vns.Problem = opProblem;