Changeset 14666
- Timestamp: 02/11/17 01:06:37
- Location: branches/PerformanceComparison
- Files: 1 added, 7 edited, 1 copied
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/Binary/SolutionModel/Univariate/UnivariateSolutionModel.cs
r14450 → r14666 (lines 110-114)

     public static ISolutionModel<BinaryVector> CreateWithFitnessBias(IRandom random, bool maximization, IEnumerable<BinaryVector> population, IEnumerable<double> qualities) {
-      var proportions = RandomEnumerable.PrepareProportional(qualities, true, !maximization);
+      var proportions = Util.Auxiliary.PrepareProportional(qualities, true, !maximization);
       var factor = 1.0 / proportions.Sum();
       double[] model = null;
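This file and the permutation model classes further below switch from RandomEnumerable.PrepareProportional to the newly added Util.Auxiliary.PrepareProportional (see the Util\Auxiliary.cs compile item in the project file). The helper's semantics are only inferred here from its call sites; a minimal C# sketch, assuming it turns qualities into non-negative selection weights with optional windowing and optional inversion for minimization (the committed Auxiliary.cs may well differ):

    using System.Collections.Generic;
    using System.Linq;

    public static class AuxiliarySketch {
      // Assumed semantics: inverseProportional flips the sign first so that, under
      // minimization, lower quality yields a larger weight; windowing then shifts all
      // weights so the worst solution gets weight 0. Signature mirrors the call sites.
      public static IList<double> PrepareProportional(IEnumerable<double> qualities,
                                                      bool windowing, bool inverseProportional) {
        var q = qualities.Select(x => inverseProportional ? -x : x).ToList();
        if (windowing) {
          var min = q.Min();
          q = q.Select(x => x - min).ToList();
        }
        return q;
      }
    }

At the call sites, 1.0 / proportions.Sum() then normalizes these weights into sampling probabilities.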
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/HeuristicLab.Algorithms.MemPR-3.3.csproj
r14552 → r14666

All six <OutputPath> entries (Debug and Release for Any CPU, x64 and x86) now point at the trunk binaries:

-    <OutputPath>..\..\bin\</OutputPath>
+    <OutputPath>..\..\..\..\trunk\sources\bin\</OutputPath>

The strong-named ALGLIB reference was replaced by a plain file reference into the trunk bin:

-    <Reference Include="ALGLIB-3.7.0, Version=3.7.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-      <SpecificVersion>False</SpecificVersion>
-      <HintPath>..\..\bin\ALGLIB-3.7.0.dll</HintPath>
+    <Reference Include="ALGLIB-3.7.0">
+      <HintPath>..\..\..\..\trunk\sources\bin\ALGLIB-3.7.0.dll</HintPath>
       <Private>False</Private>
     </Reference>

File references of the same form (HintPath into ..\..\..\..\trunk\sources\bin\, with <Private>False</Private>) were added for HeuristicLab.Algorithms.DataAnalysis-3.4, HeuristicLab.Collections-3.3, HeuristicLab.Common-3.3, HeuristicLab.Common.Resources-3.3, HeuristicLab.Core-3.3, HeuristicLab.Data-3.3, HeuristicLab.Encodings.BinaryVectorEncoding-3.3, HeuristicLab.Encodings.LinearLinkageEncoding-3.4, HeuristicLab.Operators-3.3, HeuristicLab.Optimization.Operators-3.3, HeuristicLab.Parameters-3.3, HeuristicLab.Persistence-3.3, HeuristicLab.PluginInfrastructure-3.3, HeuristicLab.Problems.DataAnalysis-3.4 and HeuristicLab.Random-3.3.

The new helper was added to the compile items:

+    <Compile Include="Util\Auxiliary.cs" />

The <ProjectReference> entries for those same fifteen assemblies were removed; the project references to HeuristicLab.Analysis-3.3 and HeuristicLab.Optimization-3.3 (plus one further entry elided from this diff view) are kept.
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/MemPRAlgorithm.cs
r14573 → r14666

(lines 248-252)
     Context.AddToPopulation(child);
     Context.BestQuality = child.Fitness;
-    Analyze(token);
+    Analyze(CancellationToken.None);
     token.ThrowIfCancellationRequested();
     if (Terminate()) return;

(around lines 398-438) The six result-reporting blocks that published "Breeding Performance", "Relinking Performance", "Delinking Performance", "Sampling Performance", "Hillclimbing Performance" and "Adaptivewalk Performance" (each guarded by a null check on the corresponding Context.*PerformanceModel and built via Context.GetSolution(model, stat)) were removed; the method now proceeds directly to Context.RunOperator(Analyzer, Context.Scope, token).
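The changeset gives no rationale for the first hunk; one plausible reading is that the per-iteration analysis should always run to completion, with cancellation observed only at the explicit check that follows it. A minimal sketch of that control flow, using simplified names rather than the actual MemPR members:

    using System.Threading;

    class LoopSketch {
      // Stand-in for the algorithm's analysis step; deliberately ignores the token it receives.
      static void Analyze(CancellationToken token) { /* collect and report results */ }

      static void Iterate(CancellationToken token) {
        // ... add child to population, update best quality ...
        Analyze(CancellationToken.None);       // the analysis step itself is never cancelled mid-run
        token.ThrowIfCancellationRequested();  // cancellation is honored between steps instead
      }
    }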
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs
r14573 → r14666

The six [Storable] IConfidenceRegressionModel fields and their public properties (breedingPerformanceModel, relinkingPerformanceModel, delinkingPerformanceModel, samplingPerformanceModel, hillclimbingPerformanceModel, adaptiveWalkPerformanceModel) were removed, together with the corresponding cloner.Clone(...) calls in the cloning constructor. The statistic lists (breedingStat, relinkingStat, delinkingStat, samplingStat, hillclimbingStat, adaptivewalkingStat) are still stored, cloned and filled.

The Add*Result methods no longer trigger periodic retraining; lines of the form

-      if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();

and the matching Relearn*PerformanceModel() methods were removed for breeding, relinking, delinking, sampling, hillclimbing and adaptive walking.

The *Suited predicates no longer consult a performance model; each now accepts unconditionally, e.g.

     public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
-      if (breedingPerformanceModel == null) return true;
-      // (range check of p1/p2 fitness against BreedingStat, then)
-      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
+      return true;
     }

RelinkSuited, DelinkSuited, SamplingSuited, HillclimbingSuited and AdaptivewalkingSuited were simplified in the same way.

Finally, the regression machinery itself was removed: the GetSolution overloads, PrepareRegression, RunRegression, GetBestRegressionSolution, ExecuteAlgorithm, ProbabilityAcceptAbsolutePerformanceModel, ProbabilityAcceptRelativePerformanceModel and the ToListRow overloads.
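For context, the removed machinery gated breeding, relinking, delinking, sampling, hillclimbing and adaptive walks on a confidence regression model of past outcomes: it predicted a mean and variance for the expected result and accepted the operation with the probability of reaching the current population's goal fitness. The following self-contained sketch illustrates that acceptance-probability idea only; it is not the removed implementation, which used GaussianProcessRegression and ALGLIB's normal-distribution routines.

    using System;

    static class AcceptanceSketch {
      // Standard normal CDF via an Abramowitz & Stegun (7.1.26) approximation of erf,
      // used here as a stand-in for the ALGLIB routine (max error about 1.5e-7).
      static double Erf(double x) {
        double sign = x < 0 ? -1.0 : 1.0;
        x = Math.Abs(x);
        double t = 1.0 / (1.0 + 0.3275911 * x);
        double poly = ((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t
                        - 0.284496736) * t + 0.254829592) * t;
        return sign * (1.0 - poly * Math.Exp(-x * x));
      }
      static double NormalCdf(double z) { return 0.5 * (1.0 + Erf(z / Math.Sqrt(2.0))); }

      // Probability that an outcome distributed N(mean, sdev^2) reaches 'goal':
      // P(X >= goal) when maximizing, P(X <= goal) when minimizing.
      public static double AcceptProbability(double mean, double sdev, double goal, bool maximization) {
        var z = (goal - mean) / sdev;
        return maximization ? 1.0 - NormalCdf(z) : NormalCdf(z);
      }
    }

In the removed code the resulting probability was compared against Random.NextDouble(); after this changeset the *Suited predicates simply return true.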
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/Permutation/SolutionModel/Univariate/UnivariateAbsoluteModel.cs
r14496 → r14666 (lines 121-125)

     public static UnivariateAbsoluteModel CreateWithFitnessBias(IRandom random, bool maximization, IList<Encodings.PermutationEncoding.Permutation> population, IEnumerable<double> qualities, int N) {
-      var proportions = RandomEnumerable.PrepareProportional(qualities, true, !maximization);
+      var proportions = Util.Auxiliary.PrepareProportional(qualities, true, !maximization);
       var factor = 1.0 / proportions.Sum();
       var model = new double[N, N];
branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/Permutation/SolutionModel/Univariate/UnivariateRelativeModel.cs
r14496 → r14666

(lines 126-130)
     public static UnivariateRelativeModel CreateDirectedWithFitnessBias(IRandom random, bool maximization, IList<Encodings.PermutationEncoding.Permutation> population, IEnumerable<double> qualities, int N) {
-      var proportions = RandomEnumerable.PrepareProportional(qualities, true, !maximization);
+      var proportions = Util.Auxiliary.PrepareProportional(qualities, true, !maximization);
       var factor = 1.0 / proportions.Sum();
       var model = new double[N, N];

(lines 180-184)
     public static UnivariateRelativeModel CreateUndirectedWithFitnessBias(IRandom random, bool maximization, IList<Encodings.PermutationEncoding.Permutation> population, IEnumerable<double> qualities, int N) {
-      var proportions = RandomEnumerable.PrepareProportional(qualities, true, !maximization);
+      var proportions = Util.Auxiliary.PrepareProportional(qualities, true, !maximization);
       var factor = 1.0 / proportions.Sum();
       var model = new double[N, N];
branches/PerformanceComparison/PerformanceComparison.sln
r14059 → r14666

The solution header was bumped from Visual Studio 2013 to Visual Studio 2015:

-    # Visual Studio 2013
-    VisualStudioVersion = 12.0.40629.0
+    # Visual Studio 14
+    VisualStudioVersion = 14.0.25420.1
     MinimumVisualStudioVersion = 10.0.40219.1

The MemPR project was added to the solution:

+    Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "HeuristicLab.Algorithms.MemPR-3.3", "HeuristicLab.Algorithms.MemPR\3.3\HeuristicLab.Algorithms.MemPR-3.3.csproj", "{9D274421-6332-4FBC-AAE4-467ACE27C368}"
+    EndProject

together with its twelve build configuration mappings (ActiveCfg and Build.0 for Debug and Release on Any CPU, x64 and x86, each mapped to the matching configuration of the new project GUID).