
source: branches/2780_SAPBA/SAPBA.patch @ 16375

Last change on this file since 16375 was 16108, checked in by bwerth, 6 years ago

#2780 renamed branch to include ticket number

File size: 76.1 KB
  • .

    Property changes on: .
    ___________________________________________________________________
    Added: svn:global-ignores
    ## -0,0 +1 ##
    +.vs
    Added: svn:ignore
    ## -0,0 +1 ##
    +.vs
  • .vs/HeuristicLab.Algorithms.SAPBA/v14/.suo

    Cannot display: file marked as a binary type.
    svn:mime-type = application/octet-stream

    Property changes on: .vs/HeuristicLab.Algorithms.SAPBA/v14/.suo
    ___________________________________________________________________
    Deleted: svn:mime-type
    ## -1 +0,0 ##
    -application/octet-stream
    \ No newline at end of property
  • .vs/Sliding Window GP/v14/.suo

    Cannot display: file marked as a binary type.
    svn:mime-type = application/octet-stream

    Property changes on: .vs/Sliding Window GP/v14/.suo
    ___________________________________________________________________
    Deleted: svn:mime-type
    ## -1 +0,0 ##
    -application/octet-stream
    \ No newline at end of property
  • HeuristicLab.Algorithms.SAPBA/HeuristicLab.Algorithms.SAPBA-3.4.csproj
     
    3030    <WarningLevel>4</WarningLevel>
    3131  </PropertyGroup>
    3232  <ItemGroup>
    33     <Reference Include="HeuristicLab.Algorithms.CMAEvolutionStrategy-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    34       <SpecificVersion>False</SpecificVersion>
    35       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.CMAEvolutionStrategy-3.4.dll</HintPath>
    36     </Reference>
    3733    <Reference Include="HeuristicLab.Algorithms.DataAnalysis-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    3834      <SpecificVersion>False</SpecificVersion>
    3935      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.DataAnalysis-3.4.dll</HintPath>
    4036    </Reference>
    41     <Reference Include="HeuristicLab.Algorithms.EvolutionStrategy-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     37    <Reference Include="HeuristicLab.Algorithms.EGO-3.4, Version=3.4.0.0, Culture=neutral, processorArchitecture=MSIL">
    4238      <SpecificVersion>False</SpecificVersion>
    43       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.EvolutionStrategy-3.3.dll</HintPath>
     39      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.EGO-3.4.dll</HintPath>
    4440    </Reference>
    4541    <Reference Include="HeuristicLab.Algorithms.GeneticAlgorithm-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    4642      <SpecificVersion>False</SpecificVersion>
    4743      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.GeneticAlgorithm-3.3.dll</HintPath>
    4844    </Reference>
     45    <Reference Include="HeuristicLab.Algorithms.OffspringSelectionEvolutionStrategy-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     46      <SpecificVersion>False</SpecificVersion>
     47      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.OffspringSelectionEvolutionStrategy-3.3.dll</HintPath>
     48    </Reference>
    4949    <Reference Include="HeuristicLab.Analysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    5050      <SpecificVersion>False</SpecificVersion>
    5151      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Analysis-3.3.dll</HintPath>
     
    5858      <SpecificVersion>False</SpecificVersion>
    5959      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
    6060    </Reference>
    61     <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    62       <SpecificVersion>False</SpecificVersion>
    63       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common.Resources-3.3.dll</HintPath>
    64     </Reference>
    6561    <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    6662      <SpecificVersion>False</SpecificVersion>
    6763      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Core-3.3.dll</HintPath>
     
    7470      <SpecificVersion>False</SpecificVersion>
    7571      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Encodings.RealVectorEncoding-3.3.dll</HintPath>
    7672    </Reference>
    77     <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec" />
     73    <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     74      <SpecificVersion>False</SpecificVersion>
     75      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Operators-3.3.dll</HintPath>
     76    </Reference>
    7877    <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    7978      <SpecificVersion>False</SpecificVersion>
    8079      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Optimization-3.3.dll</HintPath>
     
    9998      <SpecificVersion>False</SpecificVersion>
    10099      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances-3.3.dll</HintPath>
    101100    </Reference>
    102     <Reference Include="HeuristicLab.Problems.Instances.DataAnalysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    103       <SpecificVersion>False</SpecificVersion>
    104       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll</HintPath>
    105     </Reference>
    106     <Reference Include="HeuristicLab.Problems.Instances.DataAnalysis.Views-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    107       <SpecificVersion>False</SpecificVersion>
    108       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances.DataAnalysis.Views-3.3.dll</HintPath>
    109     </Reference>
    110     <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    111       <SpecificVersion>False</SpecificVersion>
    112       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
    113     </Reference>
    114101    <Reference Include="System" />
    115102    <Reference Include="System.Core" />
    116103    <Reference Include="System.Windows.Forms" />
     
    122109    <Reference Include="System.Xml" />
    123110  </ItemGroup>
    124111  <ItemGroup>
    125     <Compile Include="EgoUtilities.cs" />
    126     <Compile Include="Strategies\IndividualStrategy.cs" />
    127     <Compile Include="Strategies\GenerationalStrategy.cs" />
     112    <Compile Include="Operators\FixedSolutionCreator.cs" />
     113    <Compile Include="SapbaUtilities.cs" />
     114    <Compile Include="Strategies\LamarckianStrategy.cs" />
     115    <Compile Include="Strategies\InfillStrategy.cs" />
    128116    <Compile Include="SurrogateAssistedPopulationBasedAlgorithm.cs" />
    129117    <Compile Include="Interfaces\ISurrogateStrategy.cs" />
    130118    <Compile Include="Interfaces\ISurrogateAlgorithm.cs" />
  • HeuristicLab.Algorithms.SAPBA/Interfaces/ISurrogateStrategy.cs

     
    1919 */
    2020#endregion
    2121
    22 using System.Threading;
    2322using HeuristicLab.Core;
    2423using HeuristicLab.Encodings.RealVectorEncoding;
    2524using HeuristicLab.Optimization;
     
    2928    double Evaluate(RealVector r, IRandom random);
    3029    void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random);
    3130    void Initialize(SurrogateAssistedPopulationBasedAlgorithm algorithm);
    32     void UpdateCancellation(CancellationToken cancellationToken);
     31    bool Maximization();
    3332  }
    3433}
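    For orientation, here is how the strategy interface reads after this hunk, reconstructed only from the visible context (a sketch; any members outside the shown lines and the inherited interfaces are omitted):

    // Sketch of ISurrogateStrategy as it appears after the patch:
    // UpdateCancellation(CancellationToken) is removed and Maximization() is added.
    public interface ISurrogateStrategy {
      double Evaluate(RealVector r, IRandom random);
      void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random);
      void Initialize(SurrogateAssistedPopulationBasedAlgorithm algorithm);
      bool Maximization();
    }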
  • HeuristicLab.Algorithms.SAPBA/Operators/FixedSolutionCreator.cs

     
     1using HeuristicLab.Common;
     2using HeuristicLab.Core;
     3using HeuristicLab.Data;
     4using HeuristicLab.Encodings.RealVectorEncoding;
     5using HeuristicLab.Optimization;
     6using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     7
     8namespace HeuristicLab.Algorithms.SAPBA.Operators {
     9  [Item("FixedRealVectorCreator", "An operator which creates a new real vector cloned from a single Point")]
     10  [StorableClass]
     11  public class FixedRealVectorCreator : RealVectorCreator, IStrategyParameterCreator {
     12    [Storable]
     13    private RealVector Point;
     14
     15    [StorableConstructor]
     16    protected FixedRealVectorCreator(bool deserializing) : base(deserializing) { }
     17    protected FixedRealVectorCreator(FixedRealVectorCreator original, Cloner cloner) : base(original, cloner) {
     18      Point = cloner.Clone(original.Point);
     19    }
     20    public FixedRealVectorCreator(RealVector r) : base() {
     21      Point = r;
     22    }
     23    public override IDeepCloneable Clone(Cloner cloner) { return new FixedRealVectorCreator(this, cloner); }
     24    protected override RealVector Create(IRandom random, IntValue length, DoubleMatrix bounds) {
     25      return (RealVector)Point.Clone();
     26    }
     27  }
     28
     29}
     30
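    FixedRealVectorCreator ignores random, length, and bounds and always returns a clone of its stored point; LamarckianStrategy.OptimizeInfillProblem further down in this patch assigns it as the solution creator of the infill sub-problem so that the local search starts from a known candidate. A minimal usage sketch with illustrative values (the encoding setup here is hypothetical, not part of the patch):

    // Sketch: every solution the sub-algorithm creates starts out as a clone of 'point'.
    var point = new RealVector(new[] { 0.5, -1.25 });   // illustrative start point
    var encoding = new RealVectorEncoding();             // hypothetical encoding setup
    encoding.Length = point.Length;
    encoding.SolutionCreator = new FixedRealVectorCreator(point);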
  • HeuristicLab.Algorithms.SAPBA/Plugin.cs

     
    2525using HeuristicLab.PluginInfrastructure;
    2626
    2727namespace HeuristicLab.Algorithms.SAPBA {
    28   [Plugin("HeuristicLab.Algorithms.SAPBA", "3.4.5.14893")]
     28  [Plugin("HeuristicLab.Algorithms.SAPBA", "3.4.5.14894")]
    2929  [PluginFile("HeuristicLab.Algorithms.SAPBA-3.4.dll", PluginFileType.Assembly)]
    30   [PluginFile("displayModelFrame.html", PluginFileType.Data)]
    31   [PluginDependency("HeuristicLab.Algorithms.CMAEvolutionStrategy", "3.4")]
    3230  [PluginDependency("HeuristicLab.Algorithms.DataAnalysis", "3.4")]
     31  [PluginDependency("HeuristicLab.Algorithms.EGO", "3.4")]
     32  [PluginDependency("HeuristicLab.Algorithms.GeneticAlgorithm", "3.3")]
     33  [PluginDependency("HeuristicLab.Algorithms.OffspringSelectionEvolutionStrategy", "3.3")]
    3334  [PluginDependency("HeuristicLab.Analysis", "3.3")]
    3435  [PluginDependency("HeuristicLab.Collections", "3.3")]
    3536  [PluginDependency("HeuristicLab.Common", "3.3")]
    36   [PluginDependency("HeuristicLab.Common.Resources", "3.3")]
    3737  [PluginDependency("HeuristicLab.Core", "3.3")]
    3838  [PluginDependency("HeuristicLab.Data", "3.3")]
    3939  [PluginDependency("HeuristicLab.Encodings.RealVectorEncoding", "3.3")]
    40   [PluginDependency("HeuristicLab.Operators","3.3")]
    4140  [PluginDependency("HeuristicLab.Optimization","3.3")]
    4241  [PluginDependency("HeuristicLab.Parameters","3.3")]
    4342  [PluginDependency("HeuristicLab.Persistence","3.3")]
    4443  [PluginDependency("HeuristicLab.Problems.DataAnalysis", "3.4")]
    45   [PluginDependency("HeuristicLab.Problems.Instances", "3.3")]
    46   [PluginDependency("HeuristicLab.Random", "3.3")]
    4744  public class HeuristicLabProblemsDataAnalysisSymbolicViewsPlugin : PluginBase {
    4845  }
    4946}
  • HeuristicLab.Algorithms.SAPBA/Plugin.cs.frame

     
    2727namespace HeuristicLab.Algorithms.SAPBA {
    2828  [Plugin("HeuristicLab.Algorithms.SAPBA", "3.4.5.$WCREV$")]
    2929  [PluginFile("HeuristicLab.Algorithms.SAPBA-3.4.dll", PluginFileType.Assembly)]
    30   [PluginFile("displayModelFrame.html", PluginFileType.Data)]
    31   [PluginDependency("HeuristicLab.Algorithms.CMAEvolutionStrategy", "3.4")]
    3230  [PluginDependency("HeuristicLab.Algorithms.DataAnalysis", "3.4")]
     31  [PluginDependency("HeuristicLab.Algorithms.EGO", "3.4")]
     32  [PluginDependency("HeuristicLab.Algorithms.GeneticAlgorithm", "3.3")]
     33  [PluginDependency("HeuristicLab.Algorithms.OffspringSelectionEvolutionStrategy", "3.3")]
    3334  [PluginDependency("HeuristicLab.Analysis", "3.3")]
    3435  [PluginDependency("HeuristicLab.Collections", "3.3")]
    3536  [PluginDependency("HeuristicLab.Common", "3.3")]
    36   [PluginDependency("HeuristicLab.Common.Resources", "3.3")]
    3737  [PluginDependency("HeuristicLab.Core", "3.3")]
    3838  [PluginDependency("HeuristicLab.Data", "3.3")]
    3939  [PluginDependency("HeuristicLab.Encodings.RealVectorEncoding", "3.3")]
    40   [PluginDependency("HeuristicLab.Operators","3.3")]
    4140  [PluginDependency("HeuristicLab.Optimization","3.3")]
    4241  [PluginDependency("HeuristicLab.Parameters","3.3")]
    4342  [PluginDependency("HeuristicLab.Persistence","3.3")]
    4443  [PluginDependency("HeuristicLab.Problems.DataAnalysis", "3.4")]
    45   [PluginDependency("HeuristicLab.Problems.Instances", "3.3")]
    46   [PluginDependency("HeuristicLab.Random", "3.3")]
    4744  public class HeuristicLabProblemsDataAnalysisSymbolicViewsPlugin : PluginBase {
    4845  }
    4946}
  • HeuristicLab.Algorithms.SAPBA/Problems/SurrogateProblem.cs

     
    1 using HeuristicLab.Common;
     1#region License Information
     2/* HeuristicLab
     3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 *
     5 * This file is part of HeuristicLab.
     6 *
     7 * HeuristicLab is free software: you can redistribute it and/or modify
     8 * it under the terms of the GNU General Public License as published by
     9 * the Free Software Foundation, either version 3 of the License, or
     10 * (at your option) any later version.
     11 *
     12 * HeuristicLab is distributed in the hope that it will be useful,
     13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
     14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     15 * GNU General Public License for more details.
     16 *
     17 * You should have received a copy of the GNU General Public License
     18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
     19 */
     20#endregion
     21
     22using System;
     23using System.Collections.Generic;
     24using HeuristicLab.Common;
    225using HeuristicLab.Core;
     26using HeuristicLab.Data;
    327using HeuristicLab.Encodings.RealVectorEncoding;
    428using HeuristicLab.Optimization;
    529using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     
    630
    731namespace HeuristicLab.Algorithms.SAPBA {
    832  [StorableClass]
    9   [Item("Surrogate problem (single-objective)", "Wrapper for a problem that allows surrogate models to mitigate some of the work")]
     33  [Item("Surrogate problem (single-objective)", "A problem that uses a Surrogate Strategy to emulate an expensive problem")]
    1034  public class SurrogateProblem : SingleObjectiveBasicProblem<RealVectorEncoding> {
    11 
     35    public override bool Maximization => Strategy?.Maximization() ?? false;
    1236    [Storable]
    1337    private ISurrogateStrategy Strategy;
    1438
     
    1842    [StorableHook(HookType.AfterDeserialization)]
    1943    private void AfterDeserialization() { }
    2044    protected SurrogateProblem(SurrogateProblem original, Cloner cloner) : base(original, cloner) {
    21       Strategy = original?.Strategy;
     45      Strategy = cloner.Clone(original.Strategy);
    2246    }
    23     public override IDeepCloneable Clone(Cloner cloner) { return new SurrogateProblem(this, cloner); }
    24     public SurrogateProblem() {
     47    public override IDeepCloneable Clone(Cloner cloner) {
     48      return new SurrogateProblem(this, cloner);
    2549    }
     50    public SurrogateProblem() { }
    2651    #endregion
    2752
    2853    public override double Evaluate(Individual individual, IRandom random) {
     
    3257      base.Analyze(individuals, qualities, results, random);
    3358      Strategy.Analyze(individuals, qualities, results, random);
    3459    }
     60    public override IEnumerable<Individual> GetNeighbors(Individual individual, IRandom random) {
     61      var bounds = Encoding.Bounds;
     62      var michalewiczIteration = 0;
     63      while (true) {
     64        var neighbour = individual.Copy();
     65        var r = neighbour.RealVector();
     66        switch (random.Next(5)) {
     67          case 0: UniformOnePositionManipulator.Apply(random, r, bounds); break;
     68          case 1: FixedNormalAllPositionsManipulator.Apply(random, r, new RealVector(new[] { 0.1 })); break;
     69          case 2: MichalewiczNonUniformAllPositionsManipulator.Apply(random, r, bounds, new IntValue(michalewiczIteration++), new IntValue(10000), new DoubleValue(5.0)); break;
     70          case 3: MichalewiczNonUniformOnePositionManipulator.Apply(random, r, bounds, new IntValue(michalewiczIteration++), new IntValue(10000), new DoubleValue(5.0)); break;
     71          case 4: BreederGeneticAlgorithmManipulator.Apply(random, r, bounds, new DoubleValue(0.1)); break;
     72          default: throw new NotImplementedException();
     73        }
     74        yield return neighbour;
     75        michalewiczIteration %= 10000;
     76      }
     77    }
    3578
    36     public override bool Maximization { get; }
    37 
    38     public void SetStrategy(ISurrogateStrategy strategy) {
     79    public void Initialize(SingleObjectiveBasicProblem<IEncoding> expensiveProblem, ISurrogateStrategy strategy) {
     80      if (expensiveProblem == null) return;
     81      var enc = (RealVectorEncoding)expensiveProblem.Encoding;
     82      Encoding.Bounds = enc.Bounds;
     83      Encoding.Length = enc.Length;
     84      SolutionCreator = expensiveProblem.SolutionCreator;
    3985      Strategy = strategy;
    4086    }
    41     public void SetProblem(SingleObjectiveBasicProblem<IEncoding> expensiveProblem) {
    42       if (expensiveProblem != null) Encoding = expensiveProblem.Encoding as RealVectorEncoding;
    43     }
    44 
    4587  }
    4688}
    47 \ No newline at end of file
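    Note that the GetNeighbors override added above is an endless generator (while (true) { ... yield return ... }), so callers must bound the enumeration themselves, e.g. with LINQ's Take. A usage sketch with assumed placeholder variables (problem, individual, random are not from the patch):

    // Sketch: draw a bounded number of mutated neighbours from the endless stream (requires System.Linq).
    var neighbours = problem.GetNeighbors(individual, random).Take(20).ToList();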
  • HeuristicLab.Algorithms.SAPBA/Properties/AssemblyInfo.cs

     
    5252// You can specify all the values or you can default the Build and Revision Numbers
    5353// by using the '*' as shown below:
    5454[assembly: AssemblyVersion("3.4.0.0")]
    55 [assembly: AssemblyFileVersion("3.4.0.14893")]
     55[assembly: AssemblyFileVersion("3.4.0.14894")]
  • HeuristicLab.Algorithms.SAPBA/SapbaUtilities.cs

     
     1#region License Information
     2/* HeuristicLab
     3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 *
     5 * This file is part of HeuristicLab.
     6 *
     7 * HeuristicLab is free software: you can redistribute it and/or modify
     8 * it under the terms of the GNU General Public License as published by
     9 * the Free Software Foundation, either version 3 of the License, or
     10 * (at your option) any later version.
     11 *
     12 * HeuristicLab is distributed in the hope that it will be useful,
     13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
     14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     15 * GNU General Public License for more details.
     16 *
     17 * You should have received a copy of the GNU General Public License
     18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
     19 */
     20#endregion
     21
     22using System;
     23using System.Collections.Generic;
     24using System.Linq;
     25using System.Threading;
     26using HeuristicLab.Algorithms.DataAnalysis;
     27using HeuristicLab.Common;
     28using HeuristicLab.Core;
     29using HeuristicLab.Data;
     30using HeuristicLab.Encodings.RealVectorEncoding;
     31using HeuristicLab.Optimization;
     32using HeuristicLab.Problems.DataAnalysis;
     33
     34namespace HeuristicLab.Algorithms.SAPBA {
     35  internal static class SapbaUtilities {
     36    //Extension methods for convenience
     37    public static int ArgMax<T>(this IEnumerable<T> values, Func<T, double> func) {
     38      var max = double.MinValue;
     39      var maxIdx = 0;
     40      var idx = 0;
     41      foreach (var v in values) {
     42        var d = func.Invoke(v);
     43        if (d > max) {
     44          max = d;
     45          maxIdx = idx;
     46        }
     47        idx++;
     48      }
     49      return maxIdx;
     50    }
     51    public static int ArgMin<T>(this IEnumerable<T> values, Func<T, double> func) {
     52      return ArgMax(values, x => -func.Invoke(x));
     53    }
     54    public static double GetEstimation(this IRegressionModel model, RealVector r) {
     55      var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
     56      return model.GetEstimatedValues(dataset, new[] { 0 }).First();
     57    }
     58    public static double GetVariance(this IConfidenceRegressionModel model, RealVector r) {
     59      var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
     60      return model.GetEstimatedVariances(dataset, new[] { 0 }).First();
     61    }
     62    public static double GetDoubleValue(this IDataset dataset, int i, int j) {
     63      return dataset.GetDoubleValue("input" + j, i);
     64    }
     65
     66    //Sub-Algorithms
     67    public static ResultCollection SyncRunSubAlgorithm(IAlgorithm alg, int random) {
     68      if (alg.Parameters.ContainsKey("SetSeedRandomly") && alg.Parameters.ContainsKey("Seed")) {
     69        var setSeed = alg.Parameters["SetSeedRandomly"].ActualValue as BoolValue;
     70        var seed = alg.Parameters["Seed"].ActualValue as IntValue;
     71        if (seed == null || setSeed == null) throw new ArgumentException("wrong Seed parameter types");
     72        setSeed.Value = false;
     73        seed.Value = random;
     74
     75      }
     76      EventWaitHandle trigger = new AutoResetEvent(false);
     77      Exception ex = null;
     78      EventHandler<EventArgs<Exception>> exhandler = (sender, e) => ex = e.Value;
     79      EventHandler stoppedHandler = (sender, e) => trigger.Set();
     80      alg.ExceptionOccurred += exhandler;
     81      alg.Stopped += stoppedHandler;
     82      alg.Prepare();
     83      alg.Start();
     84      trigger.WaitOne();
     85      alg.ExceptionOccurred -= exhandler;
     86      alg.Stopped -= stoppedHandler;
     87      if (ex != null) throw ex;
     88      return alg.Results;
     89    }
     90    public static IRegressionSolution BuildModel(IReadOnlyList<Tuple<RealVector, double>> samples, IDataAnalysisAlgorithm<IRegressionProblem> regressionAlgorithm, IRandom random, bool removeDuplicates = true, IRegressionSolution oldSolution = null) {
     91      var dataset = GetDataSet(samples, removeDuplicates);
     92      var problemdata = new RegressionProblemData(dataset, dataset.VariableNames.Where(x => !x.Equals("output")), "output");
     93      problemdata.TrainingPartition.Start = 0;
     94      problemdata.TrainingPartition.End = dataset.Rows;
     95      problemdata.TestPartition.Start = dataset.Rows;
     96      problemdata.TestPartition.End = dataset.Rows;
     97
     98      if (regressionAlgorithm.Problem == null) regressionAlgorithm.Problem = new RegressionProblem();
     99      var problem = regressionAlgorithm.Problem;
     100      problem.ProblemDataParameter.Value = problemdata;
     101      var i = 0;
     102      IRegressionSolution solution = null;
     103
     104      while (solution == null && i++ < 100) {
     105        var results = SyncRunSubAlgorithm(regressionAlgorithm, random.Next(int.MaxValue));
     106        solution = results.Select(x => x.Value).OfType<IRegressionSolution>().SingleOrDefault();
     107      }
     108
     109      //special treatment for GaussianProcessRegression
     110      var gp = regressionAlgorithm as GaussianProcessRegression;
     111      var oldGaussian = oldSolution as GaussianProcessRegressionSolution;
     112      if (gp != null && oldGaussian != null) {
     113        const double noise = 0.0;
     114        var n = samples.First().Item1.Length;
     115        var mean = (IMeanFunction)oldGaussian.Model.MeanFunction.Clone();
     116        var cov = (ICovarianceFunction)oldGaussian.Model.CovarianceFunction.Clone();
     117        if (mean.GetNumberOfParameters(n) != 0 || cov.GetNumberOfParameters(n) != 0) throw new ArgumentException("DEBUG: assumption about fixed parameters wrong");
     118        double[] hyp = { noise };
     119        try {
     120          var model = new GaussianProcessModel(problemdata.Dataset, problemdata.TargetVariable, problemdata.AllowedInputVariables, problemdata.TrainingIndices, hyp, mean, cov);
     121          model.FixParameters();
     122          var sol = new GaussianProcessRegressionSolution(model, problemdata);
     123          if (solution == null || solution.TrainingMeanSquaredError > sol.TrainingMeanSquaredError) solution = sol;
     124        }
     125        catch (ArgumentException) { }
     126      }
     127      if (solution == null) throw new ArgumentException("The algorithm didn't return a model");
     128      regressionAlgorithm.Runs.Clear();
     129      return solution;
     130    }
     131
     132    //RegressionModel extensions
     133    public const double DuplicateResolution = 0.000001;
     134    public static Dataset GetDataSet(IReadOnlyList<Tuple<RealVector, double>> samples, bool removeDuplicates) {
     135      if (removeDuplicates) samples = RemoveDuplicates(samples); //TODO duplicate removal leads to incorrect uncertainty values in models
     136      var dimensions = samples[0].Item1.Length + 1;
     137      var data = new double[samples.Count, dimensions];
     138      var names = new string[dimensions - 1];
     139      for (var i = 0; i < names.Length; i++) names[i] = "input" + i;
     140      for (var j = 0; j < samples.Count; j++) {
     141        for (var i = 0; i < names.Length; i++) data[j, i] = samples[j].Item1[i];
     142        data[j, dimensions - 1] = samples[j].Item2;
     143      }
     144      return new Dataset(names.Concat(new[] { "output" }).ToArray(), data);
     145    }
     146    private static IReadOnlyList<Tuple<RealVector, double>> RemoveDuplicates(IReadOnlyList<Tuple<RealVector, double>> samples) {
     147      var res = new List<Tuple<RealVector, double, int>>();
     148      foreach (var sample in samples) {
     149        if (res.Count == 0) {
     150          res.Add(new Tuple<RealVector, double, int>(sample.Item1, sample.Item2, 1));
     151          continue;
     152        }
     153        var index = res.ArgMin(x => Euclidian(sample.Item1, x.Item1));
     154        var d = Euclidian(res[index].Item1, sample.Item1);
     155        if (d > DuplicateResolution) res.Add(new Tuple<RealVector, double, int>(sample.Item1, sample.Item2, 1));
     156        else {
     157          var t = res[index];
     158          res.RemoveAt(index);
     159          res.Add(new Tuple<RealVector, double, int>(t.Item1, t.Item2 + sample.Item2, t.Item3 + 1));
     160        }
     161      }
     162      return res.Select(x => new Tuple<RealVector, double>(x.Item1, x.Item2 / x.Item3)).ToArray();
     163    }
     164    private static double Euclidian(IEnumerable<double> a, IEnumerable<double> b) {
     165      return Math.Sqrt(a.Zip(b, (d, d1) => d - d1).Sum(d => d * d));
     166    }
     167  }
     168}
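    The ArgMax/ArgMin extensions above return the index of the best element rather than the element itself, which is how they are used elsewhere in this file (e.g. res.ArgMin(x => Euclidian(sample.Item1, x.Item1)) in RemoveDuplicates). A minimal illustration with made-up values:

    // Sketch: both helpers return an index into the sequence, not the value.
    var qualities = new List<double> { 3.0, 7.5, 1.2 };
    var bestIdx = qualities.ArgMax(q => q);   // 1
    var worstIdx = qualities.ArgMin(q => q);  // 2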
  • HeuristicLab.Algorithms.SAPBA/Strategies/InfillStrategy.cs

     
     1#region License Information
     2/* HeuristicLab
     3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 *
     5 * This file is part of HeuristicLab.
     6 *
     7 * HeuristicLab is free software: you can redistribute it and/or modify
     8 * it under the terms of the GNU General Public License as published by
     9 * the Free Software Foundation, either version 3 of the License, or
     10 * (at your option) any later version.
     11 *
     12 * HeuristicLab is distributed in the hope that it will be useful,
     13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
     14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     15 * GNU General Public License for more details.
     16 *
     17 * You should have received a copy of the GNU General Public License
     18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
     19 */
     20#endregion
     21
     22using System;
     23using System.Linq;
     24using HeuristicLab.Algorithms.EGO;
     25using HeuristicLab.Common;
     26using HeuristicLab.Core;
     27using HeuristicLab.Data;
     28using HeuristicLab.Encodings.RealVectorEncoding;
     29using HeuristicLab.Optimization;
     30using HeuristicLab.Parameters;
     31using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     32
     33namespace HeuristicLab.Algorithms.SAPBA {
     34  [StorableClass]
     35  public class InfillStrategy : StrategyBase {
     36    #region Parameternames
     37    public const string NoGenerationsParameterName = "Number of generations";
     38    public const string NoIndividualsParameterName = "Number of individuals";
     39    public const string InfillCriterionParameterName = "InfillCriterion";
     40    #endregion
     41    #region Paramterproperties
     42    public IFixedValueParameter<IntValue> NoGenerationsParameter => Parameters[NoGenerationsParameterName] as IFixedValueParameter<IntValue>;
     43    public IFixedValueParameter<IntValue> NoIndividualsParameter => Parameters[NoIndividualsParameterName] as IFixedValueParameter<IntValue>;
     44    public IConstrainedValueParameter<IInfillCriterion> InfillCriterionParameter => Parameters[InfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
     45    #endregion
     46    #region Properties
     47    public IntValue NoGenerations => NoGenerationsParameter.Value;
     48    public IntValue NoIndividuals => NoIndividualsParameter.Value;
     49    public IInfillCriterion InfillCriterion => InfillCriterionParameter.Value;
     50    [Storable]
     51    public int Generations;
     52    #endregion
     53
     54    #region Constructors
     55    [StorableConstructor]
     56    protected InfillStrategy(bool deserializing) : base(deserializing) { }
     57    [StorableHook(HookType.AfterDeserialization)]
     58    private void AfterDeserialization() {
     59      AttachListeners();
     60    }
     61    protected InfillStrategy(InfillStrategy original, Cloner cloner) : base(original, cloner) {
     62      Generations = original.Generations;
     63      AttachListeners();
     64    }
     65    public InfillStrategy() {
     66      var critera = new ItemSet<IInfillCriterion> { new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuality(), new ExpectedQuantileImprovement(), new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
     67      Parameters.Add(new FixedValueParameter<IntValue>(NoGenerationsParameterName, "The number of generations before a new model is constructed", new IntValue(3)));
     68      Parameters.Add(new FixedValueParameter<IntValue>(NoIndividualsParameterName, "The number of individuals that are sampled each generation ", new IntValue(3)));
     69      Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "The infill criterion used to cheaply evaluate points.", critera, critera.First()));
     70      AttachListeners();
     71    }
     72    public override IDeepCloneable Clone(Cloner cloner) {
     73      return new InfillStrategy(this, cloner);
     74    }
     75    #endregion
     76
     77    protected override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, ResultCollection globalResults, IRandom random) { }
     78    protected override void ProcessPopulation(Individual[] individuals, double[] qualities, IRandom random) {
     79      if (RegressionSolution != null && Generations < NoGenerations.Value) Generations++;
     80      else {
     81        //Select NoIndividuals best samples
     82        var samples = individuals
     83          .Zip(qualities, (individual, d) => new Tuple<Individual, double>(individual, d))
     84          .OrderBy(t => Problem.Maximization ? -t.Item2 : t.Item2)
     85          .Take(NoIndividuals.Value)
     86          .Select(t => t.Item1.RealVector());
     87        foreach (var indi in samples) EvaluateSample(indi, random);
     88        BuildRegressionSolution(random);
     89        Generations = 0;
     90      }
     91    }
     92    protected override void Initialize() {
     93      Generations = 0;
     94    }
     95
     96    #region events
     97    private void AttachListeners() {
     98      ModelChanged += OnModelChanged;
     99    }
     100    private void OnModelChanged(object sender, EventArgs e) {
     101      InfillCriterion.Encoding = Problem?.Encoding as RealVectorEncoding;
     102      InfillCriterion.RegressionSolution = RegressionSolution;
     103      InfillCriterion.ExpensiveMaximization = Problem?.Maximization ?? false;
     104      if (RegressionSolution != null && InfillCriterion.Encoding != null)
     105        InfillCriterion.Initialize();
     106    }
     107    #endregion
     108
     109    protected override double Estimate(RealVector point, IRandom random) {
     110      return InfillCriterion.Maximization() != Maximization() ? -InfillCriterion.Evaluate(point) : InfillCriterion.Evaluate(point);
     111    }
     112  }
     113}
  • HeuristicLab.Algorithms.SAPBA/Strategies/LamarckianStrategy.cs

     
     1#region License Information
     2/* HeuristicLab
     3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 *
     5 * This file is part of HeuristicLab.
     6 *
     7 * HeuristicLab is free software: you can redistribute it and/or modify
     8 * it under the terms of the GNU General Public License as published by
     9 * the Free Software Foundation, either version 3 of the License, or
     10 * (at your option) any later version.
     11 *
     12 * HeuristicLab is distributed in the hope that it will be useful,
     13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
     14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     15 * GNU General Public License for more details.
     16 *
     17 * You should have received a copy of the GNU General Public License
     18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
     19 */
     20#endregion
     21
     22using System;
     23using System.Collections.Generic;
     24using System.Linq;
     25using HeuristicLab.Algorithms.DataAnalysis;
     26using HeuristicLab.Algorithms.EGO;
     27using HeuristicLab.Algorithms.SAPBA.Operators;
     28using HeuristicLab.Analysis;
     29using HeuristicLab.Common;
     30using HeuristicLab.Core;
     31using HeuristicLab.Data;
     32using HeuristicLab.Encodings.RealVectorEncoding;
     33using HeuristicLab.Optimization;
     34using HeuristicLab.Parameters;
     35using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     36using HeuristicLab.Problems.DataAnalysis;
     37
     38namespace HeuristicLab.Algorithms.SAPBA {
     39  [StorableClass]
     40  public class LamarckianStrategy : InfillStrategy {
     41    #region Parameternames
     42    public const string NoTrainingPointsParameterName = "Number of Trainingpoints";
     43    public const string LocalInfillCriterionParameterName = "LocalInfillCriterion";
     44    public const string OptimizationAlgorithmParameterName = "Optimization Algorithm";
     45    public const string RegressionAlgorithmParameterName = "Regression Algorithm";
     46    #endregion
     47    #region Parameters
     48    public IFixedValueParameter<IntValue> NoTrainingPointsParameter => Parameters[NoTrainingPointsParameterName] as IFixedValueParameter<IntValue>;
     49    public IValueParameter<IAlgorithm> OptimizationAlgorithmParameter => Parameters[OptimizationAlgorithmParameterName] as IValueParameter<IAlgorithm>;
     50    public IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>> RegressionAlgorithmParameter => Parameters[RegressionAlgorithmParameterName] as IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>;
     51    public IConstrainedValueParameter<IInfillCriterion> LocalInfillCriterionParameter => Parameters[LocalInfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
     52    #endregion
     53    #region Properties
     54    public IntValue NoTrainingPoints => NoTrainingPointsParameter.Value;
     55    public IAlgorithm OptimizationAlgorithm => OptimizationAlgorithmParameter.Value;
     56    public IDataAnalysisAlgorithm<IRegressionProblem> RegressionAlgorithm => RegressionAlgorithmParameter.Value;
     57    public IInfillCriterion LocalInfillCriterion => LocalInfillCriterionParameter.Value;
     58    #endregion
     59
     60    #region Constructors
     61    [StorableConstructor]
     62    protected LamarckianStrategy(bool deserializing) : base(deserializing) { }
     63    [StorableHook(HookType.AfterDeserialization)]
     64    private void AfterDeserialization() {
     65      RegisterParameterEvents();
     66    }
     67    protected LamarckianStrategy(LamarckianStrategy original, Cloner cloner) : base(original, cloner) {
     68      RegisterParameterEvents();
     69    }
     70    public LamarckianStrategy() {
     71      var localCritera = new ItemSet<IInfillCriterion> { new ExpectedQuality(), new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuantileImprovement(), new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
     72      var osEs = new OffspringSelectionEvolutionStrategy.OffspringSelectionEvolutionStrategy {
     73        Problem = new InfillProblem(),
     74        ComparisonFactor = { Value = 1.0 },
     75        MaximumGenerations = { Value = 1000 },
     76        MaximumEvaluatedSolutions = { Value = 100000 },
     77        PlusSelection = { Value = true },
     78        PopulationSize = { Value = 1 }
     79      };
     80      osEs.MutatorParameter.Value = osEs.MutatorParameter.ValidValues.OfType<MultiRealVectorManipulator>().First();
     81      Parameters.Add(new FixedValueParameter<IntValue>(NoTrainingPointsParameterName, "The number of sample points used to create a local model", new IntValue(50)));
     82      Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(LocalInfillCriterionParameterName, "The infill criterion used to cheaply evaluate points.", localCritera, localCritera.First()));
     83      Parameters.Add(new ValueParameter<IAlgorithm>(OptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem", osEs));
     84      Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem", new GaussianProcessRegression { Problem = new RegressionProblem() }));
     85      RegisterParameterEvents();
     86    }
     87    public override IDeepCloneable Clone(Cloner cloner) {
     88      return new LamarckianStrategy(this, cloner);
     89    }
     90    #endregion
     91
     92    //Short lived stores for analysis
     93    private readonly List<double> LamarckValues = new List<double>();
     94    private readonly List<double> SampleValues = new List<double>();
     95
     96    protected override void Initialize() {
     97      base.Initialize();
     98      var infillProblem = OptimizationAlgorithm.Problem as InfillProblem;
     99      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
     100      infillProblem.InfillCriterion = LocalInfillCriterion;
     101    }
     102    protected override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, ResultCollection globalResults, IRandom random) {
     103      base.Analyze(individuals, qualities, results, globalResults, random);
     104      const string plotName = "Lamarck Comparison";
     105      const string lamarckRow = "Lamarck Values";
     106      const string samplesRow = "Original Values";
     107      if (!globalResults.ContainsKey(plotName))
     108        globalResults.Add(new Result(plotName, new DataTable(plotName)));
     109
     110      var plot = (DataTable)globalResults[plotName].Value;
     111      if (!plot.Rows.ContainsKey(lamarckRow)) plot.Rows.Add(new DataRow(lamarckRow));
     112      if (!plot.Rows.ContainsKey(samplesRow)) plot.Rows.Add(new DataRow(samplesRow));
     113      plot.Rows[lamarckRow].Values.AddRange(LamarckValues);
     114      plot.Rows[samplesRow].Values.AddRange(SampleValues);
     115      LamarckValues.Clear();
     116      SampleValues.Clear();
     117
     118      //analyze Hypervolumes
     119      const string volPlotName = "Hypervolumes Comparison";
     120      const string mainRowName = "Population Volume (log)";
     121      const string subspaceRowName = "Subspace Volume (log) for Lamarck Candidate ";
     122      if (!globalResults.ContainsKey(volPlotName))
     123        globalResults.Add(new Result(volPlotName, new DataTable(volPlotName)));
     124
     125      plot = (DataTable)globalResults[volPlotName].Value;
     126      if (!plot.Rows.ContainsKey(mainRowName)) plot.Rows.Add(new DataRow(mainRowName));
     127      var v = Math.Log(GetStableVolume(GetBoundingBox(individuals.Select(x => x.RealVector()))));
     128      plot.Rows[mainRowName].Values.Add(v);
     129
     130      var indis = individuals
     131          .Zip(qualities, (individual, d) => new Tuple<Individual, double>(individual, d))
     132          .OrderBy(t => SapbaAlgorithm.Problem.Maximization ? -t.Item2 : t.Item2)
     133          .Take(NoIndividuals.Value)
     134          .Select(t => t.Item1).ToArray();
     135
     136      for (var i = 0; i < indis.Length; i++) {
     137        var samples = GetNearestSamples(NoTrainingPoints.Value, indis[i].RealVector());
     138        var d = Math.Log(GetStableVolume(GetBoundingBox(samples.Select(x => x.Item1))));
     139        if (!plot.Rows.ContainsKey(subspaceRowName + i)) plot.Rows.Add(new DataRow(subspaceRowName + i));
     140        plot.Rows[subspaceRowName + i].Values.Add(d);
     141      }
     142
     143
     144
     145    }
     146    protected override void ProcessPopulation(Individual[] individuals, double[] qualities, IRandom random) {
     147      if (RegressionSolution == null) return;
     148      if (Generations < NoGenerations.Value) Generations++;
     149      else {
     150        //Select best Individuals
     151        var indis = individuals
     152          .Zip(qualities, (individual, d) => new Tuple<Individual, double>(individual, d))
     153          .OrderBy(t => Problem.Maximization ? -t.Item2 : t.Item2)
     154          .Take(NoIndividuals.Value)
     155          .Select(t => t.Item1).ToArray();
     156        //Evaluate individuals
     157        foreach (var individual in indis)
     158          SampleValues.Add(EvaluateSample(individual.RealVector(), random).Item2);
     159
     160        //Perform memetic replacement for all points
     161        for (var i = 0; i < indis.Length; i++) {
     162          var vector = indis[i].RealVector();
     163          var altVector = OptimizeInfillProblem(vector, random);
     164          LamarckValues.Add(EvaluateSample(altVector, random).Item2);
     165          if (LamarckValues[i] < SampleValues[i] == Problem.Maximization) continue;
     166          for (var j = 0; j < vector.Length; j++) vector[j] = altVector[j];
     167        }
     168
     169        BuildRegressionSolution(random);
     170        Generations = 0;
     171      }
     172    }
     173
     174    #region Events
     175    private void RegisterParameterEvents() {
     176      OptimizationAlgorithmParameter.ValueChanged += OnInfillAlgorithmChanged;
     177      OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
     178      LocalInfillCriterionParameter.ValueChanged += OnInfillCriterionChanged;
     179    }
     180    private void OnInfillCriterionChanged(object sender, EventArgs e) {
     181      ((InfillProblem)OptimizationAlgorithm.Problem).InfillCriterion = LocalInfillCriterion;
     182    }
     183    private void OnInfillAlgorithmChanged(object sender, EventArgs e) {
     184      OptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = LocalInfillCriterion };
     185      OptimizationAlgorithm.ProblemChanged -= OnInfillProblemChanged; //avoid double attaching
     186      OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
     187    }
     188    private void OnInfillProblemChanged(object sender, EventArgs e) {
     189      OptimizationAlgorithm.ProblemChanged -= OnInfillProblemChanged;
     190      OptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = LocalInfillCriterion };
     191      OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
     192    }
     193    #endregion
     194
     195    #region helpers
     196    private RealVector OptimizeInfillProblem(RealVector point, IRandom random) {
     197      var infillProblem = OptimizationAlgorithm.Problem as InfillProblem;
     198      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
     199      if (infillProblem.InfillCriterion != LocalInfillCriterion) throw new ArgumentException("InfillCriterion for Problem is not correctly set.");
     200
     201      var points = Math.Min(NoTrainingPoints.Value, Samples.Count);
     202      var samples = GetNearestSamples(points, point);
     203      var regression = SapbaUtilities.BuildModel(samples, RegressionAlgorithm, random);
     204      var box = GetBoundingBox(samples.Select(x => x.Item1));
     205
     206      infillProblem.Encoding.Length = ((RealVectorEncoding)Problem.Encoding).Length;
     207      infillProblem.Encoding.Bounds = box;
     208      infillProblem.Encoding.SolutionCreator = new FixedRealVectorCreator(point);
     209      infillProblem.Initialize(regression, Problem.Maximization);
     210      var res = SapbaUtilities.SyncRunSubAlgorithm(OptimizationAlgorithm, random.Next(int.MaxValue));
     211      if (!res.ContainsKey(InfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
     212      var v = res[InfillProblem.BestInfillSolutionResultName].Value as RealVector;
     213      if (v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
     214      if (!InBounds(v, box)) throw new ArgumentException("Vector not in bounds");
     215      OptimizationAlgorithm.Runs.Clear();
     216      return v;
     217    }
     218    private Tuple<RealVector, double>[] GetNearestSamples(int noSamples, RealVector point) {
     219      return Samples.Select(sample => Tuple.Create(SquaredEuclidean(sample.Item1, point), sample)).OrderBy(x => x.Item1).Take(noSamples).Select(x => x.Item2).ToArray();
     220    }
     221    private static DoubleMatrix GetBoundingBox(IEnumerable<RealVector> samples) {
     222      DoubleMatrix m = null;
     223      foreach (var sample in samples)
     224        if (m == null) {
     225          m = new DoubleMatrix(sample.Length, 2);
     226          for (var i = 0; i < sample.Length; i++) m[i, 0] = m[i, 1] = sample[i];
     227        } else
     228          for (var i = 0; i < sample.Length; i++) {
     229            m[i, 0] = Math.Min(m[i, 0], sample[i]);
     230            m[i, 1] = Math.Max(m[i, 1], sample[i]);
     231          }
     232      return m;
     233    }
     234
     235    //the volume of a bounding box with slightly increased dimensions (the volume can never reach 0)
     236    private static double GetStableVolume(DoubleMatrix bounds) {
     237      var res = 1.0;
     238      for (var i = 0; i < bounds.Rows; i++) res *= bounds[i, 1] - bounds[i, 0] + 0.1;
     239      return res;
     240    }
     241    private static bool InBounds(RealVector r, DoubleMatrix bounds) {
     242      return !r.Where((t, i) => t < bounds[i, 0] || t > bounds[i, 1]).Any();
     243    }
     244    private static double SquaredEuclidean(RealVector a, RealVector b) {
     245      return a.Select((t, i) => t - b[i]).Sum(d => d * d);
     246    }
     247    #endregion
     248  }
     249}
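    In the helpers above, GetBoundingBox collects per-dimension minima and maxima over the given samples, and GetStableVolume pads every extent by 0.1 before multiplying, so even a degenerate box yields a positive volume. A worked example with illustrative numbers:

    // Worked example (illustrative): 2-D bounding box spanning [0, 1] x [2, 2]
    // dimension 0: 1.0 - 0.0 + 0.1 = 1.1;  dimension 1 (collapsed): 2.0 - 2.0 + 0.1 = 0.1
    // GetStableVolume => 1.1 * 0.1 = 0.11, never exactly zero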
  • HeuristicLab.Algorithms.SAPBA/Strategies/StrategyBase.cs

     
    2222using System;
    2323using System.Collections.Generic;
    2424using System.Linq;
    25 using System.Threading;
    2625using HeuristicLab.Analysis;
    2726using HeuristicLab.Common;
    2827using HeuristicLab.Core;
     
    3231using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3332using HeuristicLab.Problems.DataAnalysis;
    3433
    35 namespace HeuristicLab.Algorithms.SAPBA.Strategies {
     34namespace HeuristicLab.Algorithms.SAPBA {
    3635  [StorableClass]
    3736  public abstract class StrategyBase : ParameterizedNamedItem, ISurrogateStrategy {
    3837    #region Properties
    3938    [Storable]
    40     protected SurrogateAssistedPopulationBasedAlgorithm Algorithm;
     39    protected SurrogateAssistedPopulationBasedAlgorithm SapbaAlgorithm;
     40    protected SingleObjectiveBasicProblem<IEncoding> Problem => SapbaAlgorithm?.Problem;
    4141    [Storable]
    42     private List<Tuple<RealVector, double>> Samples;
     42    protected List<Tuple<RealVector, double>> Samples;
    4343    [Storable]
    44     protected IRegressionSolution RegressionSolution;
    45     protected CancellationToken Cancellation;
    46     private IEnumerable<Tuple<RealVector, double>> TruncatedSamples => Samples.Count > Algorithm.MaximalDatasetSize && Algorithm.MaximalDatasetSize > 0 ? Samples.Skip(Samples.Count - Algorithm.MaximalDatasetSize) : Samples;
     44    private IRegressionSolution regressionSolution;
     45
     46    public IRegressionSolution RegressionSolution
     47    {
     48      get { return regressionSolution; }
     49      protected set
     50      {
     51        regressionSolution = value;
     52        OnModelChanged();
     53      }
     54    }
     55
     56    private List<Tuple<RealVector, double>> TruncatedSamples => Samples.Count > SapbaAlgorithm.MaximalDatasetSize && SapbaAlgorithm.MaximalDatasetSize > 0 ? Samples.Skip(Samples.Count - SapbaAlgorithm.MaximalDatasetSize).ToList() : Samples;
    4757    #endregion
    4858
     59    #region Events
     60    public event EventHandler ModelChanged;
     61    private void OnModelChanged() {
     62      ModelChanged?.Invoke(this, EventArgs.Empty);
     63      OnToStringChanged();
     64    }
     65    #endregion
     66
    4967    #region ResultName
    5068    private const string BestQualityResultName = "Best Quality";
    5169    private const string BestSolutionResultName = "Best Solution";
     
    6482    [StorableConstructor]
    6583    protected StrategyBase(bool deserializing) : base(deserializing) { }
    6684    protected StrategyBase(StrategyBase original, Cloner cloner) : base(original, cloner) {
    67       if (original.Samples != null) Samples = original.Samples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
     85      Samples = original.Samples?.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
    6886      RegressionSolution = cloner.Clone(original.RegressionSolution);
     87      SapbaAlgorithm = cloner.Clone(original.SapbaAlgorithm);
    6988    }
    7089    protected StrategyBase() { }
    7190    #endregion
    7291
    73     public abstract double Evaluate(RealVector r, IRandom random);
    7492    protected abstract void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, ResultCollection globalResults, IRandom random);
    7593    protected abstract void ProcessPopulation(Individual[] individuals, double[] qualities, IRandom random);
    76     protected abstract void Initialize();
     94    protected virtual void Initialize() { }
     95    //protected virtual void OnModelChanged() { }
    7796
     97    protected abstract double Estimate(RealVector r, IRandom random);
     98
     99    public double Evaluate(RealVector r, IRandom random) {
     100      if (Samples.Count < SapbaAlgorithm.InitialEvaluations) return EvaluateSample(r, random).Item2;
     101      if (Samples.Count == SapbaAlgorithm.InitialEvaluations && RegressionSolution == null) {
     102        BuildRegressionSolution(random);
     103        OnModelChanged();
     104      }
     105      return Estimate(r, random);
     106    }
    78107    public void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
    79       Algorithm.Problem.Analyze(individuals, qualities, results, random);
     108      SapbaAlgorithm.Problem.Analyze(individuals, qualities, results, random);
    80109      ProcessPopulation(individuals, qualities, random);
    81110
    82       var globalResults = Algorithm.Results;
     111      var globalResults = SapbaAlgorithm.Results;
    83112      if (!globalResults.ContainsKey(EvaluatedSoultionsResultName)) globalResults.Add(new Result(EvaluatedSoultionsResultName, new IntValue(Samples.Count)));
    84113      else ((IntValue)globalResults[EvaluatedSoultionsResultName].Value).Value = Samples.Count;
    85114      if (!globalResults.ContainsKey(IterationsResultName)) globalResults.Add(new Result(IterationsResultName, new IntValue(0)));
    86115      else ((IntValue)globalResults[IterationsResultName].Value).Value++;
    87116
    88       if (Samples.Count != 0) {
    89         var min = Samples.Min(x => x.Item2);
    90         var max = Samples.Max(x => x.Item2);
    91         var bestIdx = Algorithm.Problem.Maximization ? Samples.ArgMax(x => x.Item2) : Samples.ArgMin(x => x.Item2);
    92117
    93         if (!globalResults.ContainsKey(BestQualityResultName)) globalResults.Add(new Result(BestQualityResultName, new DoubleValue(0.0)));
    94         ((DoubleValue)globalResults[BestQualityResultName].Value).Value = Samples[bestIdx].Item2;
    95         if (!globalResults.ContainsKey(BestSolutionResultName)) globalResults.Add(new Result(BestSolutionResultName, new RealVector()));
    96         globalResults[BestSolutionResultName].Value = Samples[bestIdx].Item1;
     118      AnalyzeSamplesProgression(globalResults);
     119      AnalyzeQualities(globalResults);
    97120
    98         DataTable table;
    99         if (!globalResults.ContainsKey(QualityTableResultName)) {
     100           table = new DataTable("Qualities", "Qualities over iteration");
    101           globalResults.Add(new Result(QualityTableResultName, table));
    102           table.Rows.Add(new DataRow(BestQualityRowName, "Best Quality"));
    103           table.Rows.Add(new DataRow(WorstQualityRowName, "Worst Quality"));
    104           table.Rows.Add(new DataRow(CurrentQualityRowName, "Current Quality"));
    105           table.Rows.Add(new DataRow(MedianQualityRowName, "Median Quality"));
    106           table.Rows.Add(new DataRow(AverageQualityRowName, "Average Quality"));
    107         }
    108         table = (DataTable)globalResults[QualityTableResultName].Value;
    109         table.Rows[BestQualityResultName].Values.Add(Algorithm.Problem.Maximization ? max : min);
    110         table.Rows[WorstQualityRowName].Values.Add(Algorithm.Problem.Maximization ? min : max);
    111         table.Rows[CurrentQualityRowName].Values.Add(Samples[Samples.Count - 1].Item2);
    112         table.Rows[AverageQualityRowName].Values.Add(Samples.Average(x => x.Item2));
    113         table.Rows[MedianQualityRowName].Values.Add(Samples.Select(x => x.Item2).Median());
    114       }
    115121
     122
    116123      if (RegressionSolution != null) {
    117124        if (!globalResults.ContainsKey(RegressionSolutionResultName))
    118125          globalResults.Add(new Result(RegressionSolutionResultName, RegressionSolution));
     
    122129
    123130      Analyze(individuals, qualities, results, globalResults, random);
    124131    }
     132    private void AnalyzeSamplesProgression(ResultCollection globalResults) {
     133      const string samplesTableName = "SamplesProgression";
     134      const string minRowName = "Minimum";
     135      const string maxRowName = "Maximum";
     136      const string medianRowName = "Median";
     137      const string averageRowName = "Average";
     138      const string currentRowName = "Current";
     139
     140      if (!globalResults.ContainsKey(samplesTableName)) { globalResults.Add(new Result(samplesTableName, new DataTable())); }
     141      var table = (DataTable)globalResults[samplesTableName].Value;
     142
     143      if (!table.Rows.ContainsKey(minRowName)) table.Rows.Add(new DataRow(minRowName));
     144      if (!table.Rows.ContainsKey(maxRowName)) table.Rows.Add(new DataRow(maxRowName));
     145      if (!table.Rows.ContainsKey(medianRowName)) table.Rows.Add(new DataRow(medianRowName));
     146      if (!table.Rows.ContainsKey(averageRowName)) table.Rows.Add(new DataRow(averageRowName));
     147      if (!table.Rows.ContainsKey(currentRowName)) table.Rows.Add(new DataRow(currentRowName));
     148
     149      for (var i = table.Rows[minRowName].Values.Count + 1; i < Samples.Count; i++) {
     150        var subSamples = Samples.Take(i).Select(x => x.Item2).ToArray();
     151        table.Rows[minRowName].Values.Add(subSamples.Min());
     152        table.Rows[maxRowName].Values.Add(subSamples.Max());
     153        table.Rows[medianRowName].Values.Add(subSamples.Median());
     154        table.Rows[averageRowName].Values.Add(subSamples.Average());
     155        table.Rows[currentRowName].Values.Add(subSamples[subSamples.Length - 1]);
     156      }
     157    }
     158    private void AnalyzeQualities(ResultCollection globalResults) {
     159      if (Samples.Count == 0) return;
     160      var min = Samples.Min(x => x.Item2);
     161      var max = Samples.Max(x => x.Item2);
     162      var bestIdx = SapbaAlgorithm.Problem.Maximization ? Samples.ArgMax(x => x.Item2) : Samples.ArgMin(x => x.Item2);
     163
     164      if (!globalResults.ContainsKey(BestQualityResultName)) globalResults.Add(new Result(BestQualityResultName, new DoubleValue(0.0)));
     165      ((DoubleValue)globalResults[BestQualityResultName].Value).Value = Samples[bestIdx].Item2;
     166      if (!globalResults.ContainsKey(BestSolutionResultName)) globalResults.Add(new Result(BestSolutionResultName, new RealVector()));
     167      globalResults[BestSolutionResultName].Value = Samples[bestIdx].Item1;
     168
     169      DataTable table;
     170      if (!globalResults.ContainsKey(QualityTableResultName)) {
      171        table = new DataTable("Qualities", "Qualities over iteration");
     172        globalResults.Add(new Result(QualityTableResultName, table));
     173        table.Rows.Add(new DataRow(BestQualityRowName, "Best Quality"));
     174        table.Rows.Add(new DataRow(WorstQualityRowName, "Worst Quality"));
     175        table.Rows.Add(new DataRow(CurrentQualityRowName, "Current Quality"));
     176        table.Rows.Add(new DataRow(MedianQualityRowName, "Median Quality"));
     177        table.Rows.Add(new DataRow(AverageQualityRowName, "Average Quality"));
     178      }
     179      table = (DataTable)globalResults[QualityTableResultName].Value;
     180      table.Rows[BestQualityResultName].Values.Add(SapbaAlgorithm.Problem.Maximization ? max : min);
     181      table.Rows[WorstQualityRowName].Values.Add(SapbaAlgorithm.Problem.Maximization ? min : max);
     182      table.Rows[CurrentQualityRowName].Values.Add(Samples[Samples.Count - 1].Item2);
     183      table.Rows[AverageQualityRowName].Values.Add(Samples.Average(x => x.Item2));
     184      table.Rows[MedianQualityRowName].Values.Add(Samples.Select(x => x.Item2).Median());
     185    }
     186
    125187    public void Initialize(SurrogateAssistedPopulationBasedAlgorithm algorithm) {
    126       Algorithm = algorithm;
    127       Samples = algorithm.InitialSamples?.ToList() ?? new List<Tuple<RealVector, double>>();
     188      SapbaAlgorithm = algorithm;
     189      Samples = algorithm?.InitialSamples?.ToList() ?? new List<Tuple<RealVector, double>>();
    128190      RegressionSolution = null;
    129191      Initialize();
    130192    }
     193    public virtual bool Maximization() {
     194      return SapbaAlgorithm?.Problem?.Maximization ?? false;
     195    }
    131196
    132197    #region Helpers for Subclasses
    133198    protected void BuildRegressionSolution(IRandom random) {
    134       RegressionSolution = EgoUtilities.BuildModel(Cancellation, TruncatedSamples, Algorithm.RegressionAlgorithm, random, Algorithm.RemoveDuplicates, RegressionSolution);
     199      RegressionSolution = SapbaUtilities.BuildModel(TruncatedSamples, SapbaAlgorithm.RegressionAlgorithm, random, SapbaAlgorithm.RemoveDuplicates, RegressionSolution);
    135200    }
    136201    protected Tuple<RealVector, double> EvaluateSample(RealVector point, IRandom random) {
    137       Cancellation.ThrowIfCancellationRequested();
    138       if (Samples.Count >= Algorithm.MaximumEvaluations) { Algorithm.OptimizationAlgorithm.Stop(); return new Tuple<RealVector, double>(point, 0.0); }
    139       var p = new Tuple<RealVector, double>(point, Algorithm.Problem.Evaluate(GetIndividual(point), random));
     202      if (Samples.Count >= SapbaAlgorithm.MaximumEvaluations) { SapbaAlgorithm.OptimizationAlgorithm.Stop(); return new Tuple<RealVector, double>(point, 0.0); }
     203      var p = new Tuple<RealVector, double>((RealVector)point.Clone(), SapbaAlgorithm.Problem.Evaluate(GetIndividual(point), random));
    140204      Samples.Add(p);
    141205      return p;
    142206    }
    143     protected Tuple<RealVector, double> EstimateSample(RealVector point, IRandom random) {
    144       if (Samples.Count == Algorithm.InitialEvaluations && RegressionSolution == null) BuildRegressionSolution(random);
    145       return Samples.Count < Algorithm.InitialEvaluations ? EvaluateSample(point, random) : new Tuple<RealVector, double>(point, RegressionSolution.Model.GetEstimation(point));
     207    protected double EstimateSample(RealVector r) {
     208      return RegressionSolution.Model.GetEstimation(r);
    146209    }
     210
    147211    #endregion
    148212
    149213    #region Helpers
    150214    private Individual GetIndividual(RealVector r) {
    151215      var scope = new Scope();
    152       scope.Variables.Add(new Variable(Algorithm.Problem.Encoding.Name, r));
    153       return new SingleEncodingIndividual(Algorithm.Problem.Encoding, scope);
     216      scope.Variables.Add(new Variable(SapbaAlgorithm.Problem.Encoding.Name, r));
     217      return new SingleEncodingIndividual(SapbaAlgorithm.Problem.Encoding, scope);
    154218    }
    155 
    156     public void UpdateCancellation(CancellationToken cancellationToken) {
    157       Cancellation = cancellationToken;
    158     }
    159219    #endregion
    160220  }
    161221}
    162  No newline at end of file
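
Editor's note on usage: the reworked StrategyBase above leaves only Estimate, ProcessPopulation and Analyze for concrete strategies to supply (the patch wires up InfillStrategy and LamarckianStrategy in the algorithm's constructor below). The following is a minimal, hypothetical sketch of such a subclass; it is not part of the patch and assumes the StrategyBase members shown in the diff (EvaluateSample, BuildRegressionSolution, EstimateSample, Maximization) plus the ArgMax/ArgMin and Individual.RealVector() extension methods from HeuristicLab.Common and HeuristicLab.Encodings.RealVectorEncoding.

    using HeuristicLab.Common;
    using HeuristicLab.Core;
    using HeuristicLab.Encodings.RealVectorEncoding;
    using HeuristicLab.Optimization;
    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    namespace HeuristicLab.Algorithms.SAPBA {
      // Hypothetical strategy (illustration only): trust the surrogate's mean
      // prediction and spend one real evaluation per generation on the predicted best.
      [StorableClass]
      public sealed class MeanPredictionStrategy : StrategyBase {
        [StorableConstructor]
        private MeanPredictionStrategy(bool deserializing) : base(deserializing) { }
        private MeanPredictionStrategy(MeanPredictionStrategy original, Cloner cloner) : base(original, cloner) { }
        public MeanPredictionStrategy() { }
        public override IDeepCloneable Clone(Cloner cloner) { return new MeanPredictionStrategy(this, cloner); }

        // Cheap fitness: the model's point estimate. StrategyBase.Evaluate switches to
        // this automatically once InitialEvaluations real evaluations have been collected.
        protected override double Estimate(RealVector r, IRandom random) {
          return EstimateSample(r);
        }

        // After each generation of the sub-algorithm, evaluate the predicted-best point
        // on the real problem and rebuild the model with the enlarged sample set.
        protected override void ProcessPopulation(Individual[] individuals, double[] qualities, IRandom random) {
          if (individuals.Length == 0) return;
          var bestIdx = Maximization() ? qualities.ArgMax(x => x) : qualities.ArgMin(x => x);
          EvaluateSample(individuals[bestIdx].RealVector(), random);
          BuildRegressionSolution(random);
        }

        // No strategy-specific results; StrategyBase already reports qualities and sample progression.
        protected override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, ResultCollection globalResults, IRandom random) { }
      }
    }
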
  • HeuristicLab.Algorithms.SAPBA/SurrogateAssistedPopulationBasedAlgorithm.cs

     
    2424using System.Linq;
    2525using System.Threading;
    2626using HeuristicLab.Algorithms.DataAnalysis;
    27 using HeuristicLab.Algorithms.SAPBA.Strategies;
    2827using HeuristicLab.Common;
    2928using HeuristicLab.Core;
    3029using HeuristicLab.Data;
     
    3332using HeuristicLab.Parameters;
    3433using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3534using HeuristicLab.Problems.DataAnalysis;
    36 using HeuristicLab.Random;
    3735
    3836namespace HeuristicLab.Algorithms.SAPBA {
    3937  [StorableClass]
     
    4139  [Item("SurrogateAssistedPopulationBasedAlgorithm", "")]
    4240  public class SurrogateAssistedPopulationBasedAlgorithm : BasicAlgorithm, ISurrogateAlgorithm<RealVector> {
    4341    #region Basic-Alg-Essentials
    44     public override bool SupportsPause => true;
     42    public override bool SupportsPause => false;
    4543    public override Type ProblemType => typeof(SingleObjectiveBasicProblem<IEncoding>);
    4644    public new SingleObjectiveBasicProblem<IEncoding> Problem
    4745    {
     
    9189
    9290    #region StorableProperties
    9391    [Storable]
    94     private IRandom Random = new MersenneTwister();
    95     [Storable]
    9692    public List<Tuple<RealVector, double>> InitialSamples { get; private set; }
    9793    [Storable]
    98     public SurrogateProblem surrogateProblem;
    99     public void SetInitialSamples(RealVector[] solutions, double[] qualities) {
    100       InitialSamples = solutions.Zip(qualities, (vector, d) => new Tuple<RealVector, double>(vector, d)).ToList();
    101     }
     94    public SurrogateProblem SurrogateProblem;
    10295    #endregion
    10396
    10497    #region HLConstructors
     
    109102      RegisterEventhandlers();
    110103    }
    111104    protected SurrogateAssistedPopulationBasedAlgorithm(SurrogateAssistedPopulationBasedAlgorithm original, Cloner cloner) : base(original, cloner) {
    112       Random = cloner.Clone(Random);
    113       if (original.InitialSamples != null) InitialSamples = original.InitialSamples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
     105      InitialSamples = original.InitialSamples?.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
     106      SurrogateProblem = cloner.Clone(original.SurrogateProblem);
    114107      RegisterEventhandlers();
    115108    }
    116109    public override IDeepCloneable Clone(Cloner cloner) { return new SurrogateAssistedPopulationBasedAlgorithm(this, cloner); }
    117110    public SurrogateAssistedPopulationBasedAlgorithm() {
    118       surrogateProblem = new SurrogateProblem();
    119       var geneticAlgorithm = new GeneticAlgorithm.GeneticAlgorithm {
    120         PopulationSize = { Value = 50 },
    121         Problem = surrogateProblem
    122       };
    123       var model = new GaussianProcessRegression {
    124         Problem = new RegressionProblem()
    125       };
     111      SurrogateProblem = new SurrogateProblem();
     112      var geneticAlgorithm = new GeneticAlgorithm.GeneticAlgorithm { PopulationSize = { Value = 50 }, Problem = SurrogateProblem, Elites = { Value = 0 } };
     113      var model = new GaussianProcessRegression { Problem = new RegressionProblem() };
     114      var strategies = new ItemSet<ISurrogateStrategy> { new InfillStrategy(), new LamarckianStrategy() };
     115
    126116      model.CovarianceFunctionParameter.Value = new CovarianceRationalQuadraticIso();
    127 
    128       Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "", new IntValue(int.MaxValue)));
    129       Parameters.Add(new FixedValueParameter<IntValue>(InitialEvaluationsParameterName, "", new IntValue(10)));
     117      Parameters.Add(new FixedValueParameter<IntValue>(InitialEvaluationsParameterName, "The initial number of evaluations performed before the first model is constructed", new IntValue(10)));
      118      Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to generate the model. Set 0 or less to always use all samples", new IntValue(-1)));
     119      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "The maximum number of evaluations performed", new IntValue(int.MaxValue)));
    130120      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeParameterName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(-1)));
     121      Parameters.Add(new ValueParameter<Algorithm>(OptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem", geneticAlgorithm));
     122      Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem", model));
      123      Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParamterName, "Whether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill conditioned models (unbuildable models) but is not theoretically sound as the model ignores the increasing certainty in this region"));
    131124      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    132125      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
    133       Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem", model));
    134       Parameters.Add(new ValueParameter<Algorithm>(OptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem", geneticAlgorithm));
    135       Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to generate the model. Set 0 or less to use always all samples ", new IntValue(-1)));
     136       Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParamterName, "Whether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill conditioned models (unbuildable models) but is not theoretically sound as the model ignores the increasing certainty in this region"));
    137       var strategies = new ItemSet<ISurrogateStrategy> { new GenerationalStrategy(), new IndividualStrategy() };
    138126      Parameters.Add(new ConstrainedValueParameter<ISurrogateStrategy>(StrategyParameterName, "The surrogate strategy that dictates how the optimization alg is assisted", strategies, strategies.First()));
    139127      RegisterEventhandlers();
    140128    }
    141129    #endregion
    142130
     131    public void SetInitialSamples(RealVector[] solutions, double[] qualities) {
     132      InitialSamples = solutions.Zip(qualities, (vector, d) => new Tuple<RealVector, double>(vector, d)).ToList();
     133    }
    143134    protected override void Initialize(CancellationToken cancellationToken) {
    144135      base.Initialize(cancellationToken);
    145       //encoding
    146136      var enc = Problem.Encoding as RealVectorEncoding;
    147137      if (enc == null) throw new ArgumentException("The SAPBA algorithm can only be applied to RealVectorEncodings");
    148 
    149       //random
    150138      if (SetSeedRandomly) SeedParameter.Value.Value = new System.Random().Next();
    151       Random.Reset(Seed);
    152 
    153       //initialize Strategy and Problem
    154139      SurrogateStrategy.Initialize(this);
    155       SurrogateStrategy.UpdateCancellation(cancellationToken);
    156       surrogateProblem.SetStrategy(SurrogateStrategy);
    157       surrogateProblem.SetProblem(Problem);
     140      SurrogateProblem.Initialize(Problem, SurrogateStrategy);
    158141    }
    159142    protected override void Run(CancellationToken cancellationToken) {
    160       SurrogateStrategy.UpdateCancellation(cancellationToken);
    161       try { EgoUtilities.SyncRunSubAlgorithm(OptimizationAlgorithm, Random.Next()); }
    162       finally { Analyze(); }
     143      try { SapbaUtilities.SyncRunSubAlgorithm(OptimizationAlgorithm, Seed); }
     144      finally {
     145        foreach (var surrogateStrategy in StrategyParameter.ValidValues) surrogateStrategy.Initialize(null);
     146        OptimizationAlgorithm.Runs.Clear();
     147      }
    163148    }
    164     private void Analyze() { }
    165149
    166150    #region Eventhandling
    167151    private void RegisterEventhandlers() {
     
    172156      OptimizationAlgorithmParameter.ValueChanged -= OnOptimizationAlgorithmChanged;
    173157    }
    174158    private void OnOptimizationAlgorithmChanged(object sender, EventArgs e) {
    175       OptimizationAlgorithm.Problem = surrogateProblem;
     159      OptimizationAlgorithm.Problem = SurrogateProblem;
    176160    }
    177161
    178162    protected override void OnExecutionTimeChanged() {
     
    179163      base.OnExecutionTimeChanged();
    180164      if (CancellationTokenSource == null) return;
    181165      if (MaximumRuntime == -1) return;
    182       if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
     166      if (ExecutionTime.TotalSeconds > MaximumRuntime) OptimizationAlgorithm.Stop();
    183167    }
    184168    public override void Pause() {
    185169      base.Pause();
    186       if (RegressionAlgorithm.ExecutionState == ExecutionState.Started) RegressionAlgorithm.Pause();
    187       if (OptimizationAlgorithm.ExecutionState == ExecutionState.Started) OptimizationAlgorithm.Pause();
    188 
     170      if (RegressionAlgorithm.ExecutionState == ExecutionState.Started) RegressionAlgorithm.Stop();
     171      if (OptimizationAlgorithm.ExecutionState == ExecutionState.Started) OptimizationAlgorithm.Stop();
    189172    }
    190173    public override void Stop() {
    191174      base.Stop();
    192       if (RegressionAlgorithm.ExecutionState == ExecutionState.Started) RegressionAlgorithm.Stop();
    193       if (OptimizationAlgorithm.ExecutionState == ExecutionState.Started) OptimizationAlgorithm.Stop();
     175      //if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
     176      if (OptimizationAlgorithm.ExecutionState == ExecutionState.Started || OptimizationAlgorithm.ExecutionState == ExecutionState.Paused) OptimizationAlgorithm.Stop();
    194177    }
    195     protected override void OnProblemChanged() {
    196       base.OnProblemChanged();
    197       surrogateProblem.SetProblem(Problem);
    198     }
    199178    #endregion
    200179
    201180  }
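
Editor's note: Run above delegates to SapbaUtilities.SyncRunSubAlgorithm, which is not included in this excerpt of the patch (the old code used the analogous EgoUtilities helper). The sketch below is an assumption about what such a helper typically does in HeuristicLab, namely fixing the sub-algorithm's seed and blocking until it has stopped; the "Seed" and "SetSeedRandomly" parameter names are assumptions about the sub-algorithm, not guaranteed by the patch.

    using System;
    using System.Threading;
    using HeuristicLab.Data;
    using HeuristicLab.Optimization;

    // Sketch only, not taken from the patch: a plausible synchronous runner for the
    // optimization sub-algorithm used in SurrogateAssistedPopulationBasedAlgorithm.Run.
    public static class SapbaUtilitiesSketch {
      public static void SyncRunSubAlgorithm(IAlgorithm algorithm, int seed) {
        // Fix the sub-algorithm's seed if it exposes the usual parameters
        // (assumed names "Seed" and "SetSeedRandomly").
        if (algorithm.Parameters.ContainsKey("Seed"))
          ((IntValue)algorithm.Parameters["Seed"].ActualValue).Value = seed;
        if (algorithm.Parameters.ContainsKey("SetSeedRandomly"))
          ((BoolValue)algorithm.Parameters["SetSeedRandomly"].ActualValue).Value = false;

        using (var finished = new ManualResetEventSlim(false)) {
          EventHandler onStopped = (s, e) => finished.Set();
          algorithm.Stopped += onStopped;
          try {
            algorithm.Prepare();   // reset the execution state of the sub-algorithm
            algorithm.Start();     // Start() returns immediately; execution is asynchronous
            finished.Wait();       // block the caller until the sub-algorithm reports Stopped
          } finally {
            algorithm.Stopped -= onStopped;
          }
        }
      }
    }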