#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2010 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Optimization;
using System.Drawing;
using HeuristicLab.Collections;
using System.Collections.Generic;
using HeuristicLab.Hive.Contracts.BusinessObjects;
using System.IO;
using HeuristicLab.Persistence.Default.Xml;
using HeuristicLab.PluginInfrastructure;
using System.Reflection;
using HeuristicLab.Hive.Contracts.Interfaces;
using HeuristicLab.Hive.Contracts;
using System.Threading;
using HeuristicLab.Tracing;
using HeuristicLab.Hive.JobBase;
using System.Diagnostics;
using System.Collections;
using System.ServiceModel;

namespace HeuristicLab.Hive.Experiment {
  /// <summary>
  /// An experiment which contains multiple batch runs of algorithms.
  /// The optimizer-tree of the contained <see cref="HeuristicLab.Optimization.Experiment"/>
  /// is flattened into individual jobs which are serialized and sent to the Hive server;
  /// results are polled back periodically and finished optimizers are re-inserted into the tree.
  /// </summary>
  [Item(itemName, itemDescription)]
  [Creatable("Testing & Analysis")]
  [StorableClass]
  public class HiveExperiment : NamedItem, IExecutable {
    private const string itemName = "Hive Experiment";
    private const string itemDescription = "A runner for a single experiment, which's algorithms are executed in the Hive.";
    private const int resultPollingIntervalMs = 5000;
    private const int snapshotPollingIntervalMs = 1000;
    private const int maxSnapshotRetries = 20;

    private object locker = new object();
    private System.Timers.Timer timer;
    private bool pausePending, stopPending;
    private bool sendingJobsFinished = false;

    // ensure that only 2 threads can fetch job results simultaneously
    private Semaphore fetchJobSemaphore = new Semaphore(2, 2);

    private bool stopResultsPollingPending = false;
    private Thread resultPollingThread;

    private bool isPollingResults;
    public bool IsPollingResults {
      get { return isPollingResults; }
      private set {
        if (isPollingResults != value) {
          isPollingResults = value;
          OnIsPollingResultsChanged();
        }
      }
    }

    /// <summary>
    /// The resource group names, parsed from the semicolon-separated <see cref="ResourceIds"/> string.
    /// Returns an empty sequence when no resource ids are configured.
    /// </summary>
    public IEnumerable<string> ResourceGroups {
      get {
        if (!string.IsNullOrEmpty(resourceIds)) {
          return resourceIds.Split(';');
        } else {
          return new List<string>();
        }
      }
    }

    #region Storable Properties
    [Storable]
    private DateTime lastUpdateTime;

    /// <summary>
    /// Mapping from JobId to an optimizer.
    /// Stores all pending optimizers. If an optimizer is finished it is removed from this collection.
    /// </summary>
    // NOTE(review): generic type arguments were reconstructed from usage
    // (keys come from JobDto.Id, values from the experiment tree) — confirm against project history.
    [Storable]
    private IDictionary<Guid, IOptimizer> pendingOptimizersByJobId = new Dictionary<Guid, IOptimizer>();

    /// <summary>
    /// Stores a mapping from the child-optimizer to the parent optimizer.
    /// Needed to replace a finished optimizer in the optimizer-tree.
    /// Only pending optimizers are stored.
    /// </summary>
    [Storable]
    private IDictionary<IOptimizer, IOptimizer> parentOptimizersByPendingOptimizer = new Dictionary<IOptimizer, IOptimizer>();

    [Storable]
    private JobItemList jobItems;
    public JobItemList JobItems {
      get { return jobItems; }
    }

    [Storable]
    private string serverUrl;
    public string ServerUrl {
      get { return serverUrl; }
      set {
        if (serverUrl != value) {
          serverUrl = value;
          OnServerUrlChanged();
        }
      }
    }

    [Storable]
    private string resourceIds;
    public string ResourceIds {
      get { return resourceIds; }
      set {
        if (resourceIds != value) {
          resourceIds = value;
          OnResourceIdsChanged();
        }
      }
    }

    [Storable]
    private HeuristicLab.Optimization.Experiment experiment;
    public HeuristicLab.Optimization.Experiment Experiment {
      get { return experiment; }
      set {
        if (experiment != value) {
          experiment = value;
          OnExperimentChanged();
        }
      }
    }

    [Storable]
    private ILog log;
    public ILog Log {
      get { return log; }
    }

    [Storable]
    private Core.ExecutionState executionState;
    public ExecutionState ExecutionState {
      get { return executionState; }
      private set {
        if (executionState != value) {
          executionState = value;
          OnExecutionStateChanged();
        }
      }
    }

    [Storable]
    private TimeSpan executionTime;
    public TimeSpan ExecutionTime {
      get { return executionTime; }
      private set {
        if (executionTime != value) {
          executionTime = value;
          OnExecutionTimeChanged();
        }
      }
    }
    #endregion

    [StorableConstructor]
    public HiveExperiment(bool deserializing) : base(deserializing) { }

    public HiveExperiment()
      : base(itemName, itemDescription) {
      this.ServerUrl = HeuristicLab.Hive.Experiment.Properties.Settings.Default.HiveServerUrl;
      this.ResourceIds = HeuristicLab.Hive.Experiment.Properties.Settings.Default.ResourceIds;
      this.log = new Log();
      pausePending = stopPending = false;
      jobItems = new JobItemList();
      isPollingResults = false;
      RegisterJobItemListEvents();
      InitTimer();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      LogMessage("I am being cloned");
      HiveExperiment clone = (HiveExperiment)base.Clone(cloner);
      clone.resourceIds = this.resourceIds;
      clone.serverUrl = this.serverUrl;
      clone.experiment = (HeuristicLab.Optimization.Experiment)cloner.Clone(experiment);
      clone.executionState = this.executionState;
      clone.executionTime = this.executionTime;
      clone.pendingOptimizersByJobId = new Dictionary<Guid, IOptimizer>();
      foreach (var pair in this.pendingOptimizersByJobId)
        clone.pendingOptimizersByJobId[pair.Key] = (IOptimizer)cloner.Clone(pair.Value);
      // parentOptimizersByPendingOptimizer relies on the field initializer of the clone
      // (an empty dictionary) and is filled here.
      foreach (var pair in this.parentOptimizersByPendingOptimizer)
        clone.parentOptimizersByPendingOptimizer[(IOptimizer)cloner.Clone(pair.Key)] = (IOptimizer)cloner.Clone(pair.Value);
      clone.log = (ILog)cloner.Clone(log);
      clone.stopPending = this.stopPending;
      clone.pausePending = this.pausePending;
      clone.jobItems.AddRange((JobItemList)cloner.Clone(jobItems));
      clone.lastUpdateTime = this.lastUpdateTime;
      clone.isPollingResults = this.isPollingResults;
      return clone;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      InitTimer();
      this.IsPollingResults = false;
      this.stopResultsPollingPending = false;
      RegisterJobItemListEvents();
      LogMessage("I was deserialized.");
    }

    #region Execution Time Timer
    private void InitTimer() {
      timer = new System.Timers.Timer(100);
      timer.AutoReset = true;
      timer.Elapsed += new System.Timers.ElapsedEventHandler(timer_Elapsed);
    }

    private void timer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) {
      DateTime now = DateTime.Now;
      ExecutionTime += now - lastUpdateTime;
      lastUpdateTime = now;
    }
    #endregion

    #region IExecutable Methods
    public void Pause() {
      throw new NotSupportedException();
    }

    public void Prepare() {
      if (experiment != null) {
        StopResultPolling();
        lock (pendingOptimizersByJobId) {
          pendingOptimizersByJobId.Clear();
        }
        parentOptimizersByPendingOptimizer.Clear();
        lock (jobItems) {
          jobItems.Clear();
        }
        experiment.Prepare();
        this.ExecutionState = Core.ExecutionState.Prepared;
        OnPrepared();
      }
    }

    public void Start() {
      sendingJobsFinished = false;
      OnStarted();
      ExecutionTime = new TimeSpan();
      lastUpdateTime = DateTime.Now;
      this.ExecutionState = Core.ExecutionState.Started;
      StartResultPolling();

      // send jobs asynchronously so the UI thread is not blocked by serialization / network calls
      Thread t = new Thread(() => {
        IExecutionEngineFacade executionEngineFacade = GetExecutionEngineFacade();
        try {
          pendingOptimizersByJobId = new Dictionary<Guid, IOptimizer>();
          LogMessage("Extracting jobs from Experiment");
          parentOptimizersByPendingOptimizer = GetOptimizers(true);
          LogMessage("Extraction of jobs from Experiment finished");

          IEnumerable<string> groups = ResourceGroups;
          foreach (IOptimizer optimizer in parentOptimizersByPendingOptimizer.Keys) {
            SerializedJob serializedJob = CreateSerializedJob(optimizer);
            ResponseObject<JobDto> response = executionEngineFacade.AddJobWithGroupStrings(serializedJob, groups);
            lock (pendingOptimizersByJobId) {
              pendingOptimizersByJobId.Add(response.Obj.Id, optimizer);
            }
            JobItem jobItem = new JobItem() {
              JobDto = response.Obj,
              LatestSnapshot = null,
              Optimizer = optimizer
            };
            lock (jobItems) {
              jobItems.Add(jobItem);
            }
            LogMessage(jobItem.JobDto.Id, "Job sent to Hive");
          }
        }
        catch (Exception e) {
          LogMessage("Error: Starting HiveExperiment failed: " + e.Message);
          this.ExecutionState = Core.ExecutionState.Stopped;
          OnStopped();
        }
        sendingJobsFinished = true;
      });
      t.Start();
    }

    public void Stop() {
      this.ExecutionState = Core.ExecutionState.Stopped;
      foreach (JobItem jobItem in jobItems) {
        AbortJob(jobItem.JobDto.Id);
      }
      OnStopped();
    }
    #endregion

    #region Optimizier Management
    /// <summary>
    /// Returns all optimizers in the current Experiment.
    /// </summary>
    /// <param name="flatout">if false only top level optimizers are returned, if true the optimizer-tree is flattened</param>
    private IDictionary<IOptimizer, IOptimizer> GetOptimizers(bool flatout) {
      if (!flatout) {
        var optimizers = new Dictionary<IOptimizer, IOptimizer>();
        foreach (IOptimizer opt in experiment.Optimizers) {
          // BUGFIX: map child -> parent, consistent with FlatOptimizerTree and with all
          // consumers (parentOptimizersByPendingOptimizer[child] yields the parent).
          // The original added the experiment as key, which throws a duplicate-key
          // ArgumentException on the second optimizer and inverts the mapping.
          optimizers.Add(opt, experiment);
        }
        return optimizers;
      } else {
        return FlatOptimizerTree(null, experiment, "");
      }
    }

    /// <summary>
    /// Recursively iterates all IOptimizers in the optimizer-tree and returns them.
    ///
    /// [chn] this could be implemented more cleanly if Experiment and BatchRun would implement an interface like:
    /// interface IParallelizable {
    ///   IEnumerable&lt;IOptimizer&gt; GetOptimizers();
    /// }
    /// </summary>
    /// <returns>a dictionary mapping from the child optimizer to its parent optimizer</returns>
    private IDictionary<IOptimizer, IOptimizer> FlatOptimizerTree(IOptimizer parent, IOptimizer optimizer, string prepend) {
      IDictionary<IOptimizer, IOptimizer> optimizers = new Dictionary<IOptimizer, IOptimizer>();
      if (optimizer is HeuristicLab.Optimization.Experiment) {
        HeuristicLab.Optimization.Experiment experiment = optimizer as HeuristicLab.Optimization.Experiment;
        if (this.experiment != experiment) {
          prepend += experiment.Name + "/"; // don't prepend for top-level optimizers
        }
        foreach (IOptimizer opt in experiment.Optimizers) {
          AddRange(optimizers, FlatOptimizerTree(experiment, opt, prepend));
        }
      } else if (optimizer is BatchRun) {
        BatchRun batchRun = optimizer as BatchRun;
        prepend += batchRun.Name + "/";
        // a BatchRun is expanded into one cloned algorithm per repetition
        for (int i = 0; i < batchRun.Repetitions; i++) {
          IOptimizer opt = (IOptimizer)batchRun.Algorithm.Clone();
          opt.Name += " [" + i + "]";
          IDictionary<IOptimizer, IOptimizer> batchOptimizers = FlatOptimizerTree(batchRun, opt, prepend);
          AddRange(optimizers, batchOptimizers);
        }
      } else if (optimizer is EngineAlgorithm) {
        optimizer.Name = prepend + optimizer.Name;
        optimizers.Add(optimizer, parent);
        LogMessage("Optimizer extracted: " + optimizer.Name);
      } else {
        // BUGFIX: log the type of the unknown optimizer, not of the result dictionary
        Logger.Warn("Optimizer of type " + optimizer.GetType().ToString() + " unknown");
        optimizer.Name = prepend + optimizer.Name;
        optimizers.Add(optimizer, parent);
        LogMessage("Optimizer extracted: " + optimizer.Name);
      }
      return optimizers;
    }

    /// <summary>
    /// Replaces the original (pending) optimizer in the optimizer-tree with the
    /// optimizer restored from the Hive result.
    /// </summary>
    private void ReplaceOptimizer(IOptimizer parentOptimizer, IOptimizer originalOptimizer, IOptimizer newOptimizer) {
      lock (locker) {
        if (parentOptimizer is HeuristicLab.Optimization.Experiment) {
          HeuristicLab.Optimization.Experiment exp = (HeuristicLab.Optimization.Experiment)parentOptimizer;
          int originalOptimizerIndex = exp.Optimizers.IndexOf(originalOptimizer);
          exp.Optimizers[originalOptimizerIndex] = newOptimizer;
        } else if (parentOptimizer is BatchRun) {
          BatchRun batchRun = (BatchRun)parentOptimizer;
          if (newOptimizer is IAlgorithm) {
            batchRun.Runs.Add(new Run(newOptimizer.Name, (IAlgorithm)newOptimizer));
          } else {
            throw new NotSupportedException("Only IAlgorithm types supported");
          }
        } else {
          throw new NotSupportedException("Invalid parentOptimizer");
        }
      }
    }

    private bool NoMorePendingOptimizers() {
      lock (pendingOptimizersByJobId) {
        return pendingOptimizersByJobId.Count == 0;
      }
    }

    /// <summary>
    /// Removes optimizers from
    /// - parentOptimizersByPendingOptimizer
    /// - pendingOptimizersByJobId
    /// </summary>
    private void DisposeOptimizerMappings(Guid jobId) {
      lock (pendingOptimizersByJobId) {
        LogMessage(jobId, "Disposing Optimizer Mappings");
        parentOptimizersByPendingOptimizer.Remove(pendingOptimizersByJobId[jobId]);
        pendingOptimizersByJobId.Remove(jobId);
      }
    }
    #endregion

    #region Job Management
    /// <summary>
    /// Updates all JobItems with the results.
    /// </summary>
    private void UpdateJobItems(JobResultList jobResultList) {
      // use a Dict to avoid quadratic runtime complexity
      IDictionary<Guid, JobResult> jobResultDict = jobResultList.ToDictionary(job => job.JobId);
      lock (jobItems) {
        foreach (JobItem jobItem in JobItems) {
          if (jobResultDict.ContainsKey(jobItem.JobDto.Id)) {
            jobItem.JobResult = jobResultDict[jobItem.JobDto.Id];
          }
        }
      }
    }

    private void JobItem_JobStateChanged(object sender, EventArgs e) {
      JobItem jobItem = (JobItem)sender;
      // fetching results is slow; do it off the event-raising thread
      Thread t = new Thread(() => {
        if (jobItem.State == State.Finished) {
          FetchAndUpdateJob(jobItem.JobDto.Id);
          DisposeOptimizerMappings(jobItem.JobDto.Id);
        } else if (jobItem.State == State.Failed) {
          DisposeOptimizerMappings(jobItem.JobDto.Id);
        }
        if (NoMorePendingOptimizers()) {
          StopResultPolling();
          this.ExecutionState = Core.ExecutionState.Stopped;
          OnStopped();
        }
      });
      t.Start();
    }

    /// <summary>
    /// Fetches the finished job from the server and updates the jobItem.
    /// </summary>
    private void FetchAndUpdateJob(Guid jobId) {
      LogMessage(jobId, "FetchAndUpdateJob started");
      IExecutionEngineFacade executionEngineFacade = GetExecutionEngineFacade();
      IOptimizer originalOptimizer;
      lock (pendingOptimizersByJobId) {
        originalOptimizer = pendingOptimizersByJobId[jobId];
      }

      fetchJobSemaphore.WaitOne();
      try {
        ResponseObject<SerializedJob> jobResponse = executionEngineFacade.GetLastSerializedResult(jobId, false, false);
        // BUGFIX: dispose the MemoryStream; deserialization kept as-is otherwise
        using (MemoryStream stream = new MemoryStream(jobResponse.Obj.SerializedJobData)) {
          IJob restoredObject = XmlParser.Deserialize<IJob>(stream);
          IOptimizer restoredOptimizer = ((OptimizerJob)restoredObject).Optimizer;
          ReplaceOptimizer(parentOptimizersByPendingOptimizer[originalOptimizer], originalOptimizer, restoredOptimizer);
        }
      }
      finally {
        // BUGFIX: release in finally so a failed fetch cannot leak one of the two semaphore slots
        fetchJobSemaphore.Release();
      }
      LogMessage(jobId, "FetchAndUpdateJob ended");
    }

    private void UpdateJobItem(JobDto jobDto) {
      JobItem jobItem = jobItems.Single(x => x.JobDto.Id == jobDto.Id);
      jobItem.JobDto = jobDto;
    }

    public void AbortJob(Guid jobId) {
      IExecutionEngineFacade executionEngineFacade = GetExecutionEngineFacade();
      Response response = executionEngineFacade.AbortJob(jobId);
      LogMessage(jobId, "Aborting Job: " + response.StatusMessage);
    }

    /// <summary>
    /// Serializes the given optimizer into a SerializedJob including the
    /// plugin dependencies needed to execute it on a Hive client.
    /// </summary>
    private SerializedJob CreateSerializedJob(IOptimizer optimizer) {
      IJob job = new OptimizerJob() {
        Optimizer = optimizer
      };

      // serialize job
      byte[] jobByteArray;
      using (MemoryStream memStream = new MemoryStream()) {
        XmlGenerator.Serialize(job, memStream);
        jobByteArray = memStream.ToArray();
      }

      // find out which plugins are needed for the given object
      List<HivePluginInfoDto> pluginsNeeded = (
        from p in GetDeclaringPlugins(job.GetType())
        select new HivePluginInfoDto() {
          Name = p.Name,
          Version = p.Version
        }).ToList();

      JobDto jobDto = new JobDto() {
        CoresNeeded = 1, // [chn] how to determine real cores needed?
        PluginsNeeded = pluginsNeeded,
        State = State.Offline,
        MemoryNeeded = 0,
        UserId = Guid.Empty // [chn] set real userid here!
      };

      SerializedJob serializedJob = new SerializedJob() {
        JobInfo = jobDto,
        SerializedJobData = jobByteArray
      };
      return serializedJob;
    }

    private JobItem GetJobItemById(Guid jobId) {
      return jobItems.Single(x => x.JobDto.Id == jobId);
    }
    #endregion

    #region Result Polling
    public void StartResultPolling() {
      this.stopResultsPollingPending = false;
      this.IsPollingResults = true;
      resultPollingThread = CreateResultPollingThread();
      if (resultPollingThread.ThreadState != System.Threading.ThreadState.Running)
        resultPollingThread.Start();
    }

    public void StopResultPolling() {
      this.stopResultsPollingPending = true;
      // BUGFIX: Prepare() calls StopResultPolling() even when polling was never
      // started; guard against a null thread reference.
      if (resultPollingThread != null)
        resultPollingThread.Interrupt();
      // the interrupt wakes the polling thread out of Sleep(), so the flag can be reset
      this.stopResultsPollingPending = false;
    }

    private Thread CreateResultPollingThread() {
      return new Thread(() => {
        try {
          do {
            IExecutionEngineFacade executionEngineFacade = GetExecutionEngineFacade();
            IEnumerable<Guid> jobIdsToQuery = from job in JobItems
                                              where job.State != State.Finished &&
                                                    job.State != State.Failed
                                              select job.JobDto.Id;
            if (jobIdsToQuery.Count() > 0) {
              LogMessage("Polling results for " + jobIdsToQuery.Count() + " jobs");
              try {
                ResponseObject<JobResultList> response = executionEngineFacade.GetAllJobResults(jobIdsToQuery);
                if (response.Success) {
                  JobResultList jobItemList = response.Obj;
                  UpdateJobItems(jobItemList);
                  LogMessage("Polling successfully finished");
                }
              }
              catch (Exception e) {
                LogMessage("Polling results failed: " + e.Message);
              }
              Thread.Sleep(resultPollingIntervalMs);
            } else {
              if (sendingJobsFinished) {
                // all the jobs have been sent to hive, but none are left to query (all finished or failed)
                this.stopResultsPollingPending = true;
              }
            }
          } while (!this.stopResultsPollingPending);
        }
        catch (ThreadInterruptedException) {
          // thread has been interrupted by StopResultPolling()
        }
        finally {
          this.IsPollingResults = false;
        }
      });
    }
    #endregion

    #region Snapshots
    private void UpdateSnapshot(ResponseObject<SerializedJob> response) {
      JobItem jobItem = jobItems.Single(x => x.JobDto.Id == response.Obj.JobInfo.Id);
      jobItem.LatestSnapshot = response;
    }

    /// <summary>
    /// Requests a snapshot of a running job from the server and polls
    /// (up to maxSnapshotRetries times) until the snapshot is available.
    /// Runs asynchronously.
    /// </summary>
    public void RequestSnapshot(Guid jobId) {
      Thread t = new Thread(() => {
        IExecutionEngineFacade executionEngineFacade = GetExecutionEngineFacade();
        ResponseObject<SerializedJob> response;
        int retryCount = 0;
        Response snapShotResponse = executionEngineFacade.RequestSnapshot(jobId);
        if (snapShotResponse.StatusMessage == ApplicationConstants.RESPONSE_JOB_IS_NOT_BEEING_CALCULATED) {
          // job already finished
          Logger.Debug("HiveExperiment: Abort - GetLastResult(false)");
          response = executionEngineFacade.GetLastSerializedResult(jobId, false, false);
          Logger.Debug("HiveExperiment: Abort - Server: " + response.StatusMessage + " success: " + response.Success);
        } else {
          // server sent snapshot request to client
          // poll until snapshot is ready
          do {
            Thread.Sleep(snapshotPollingIntervalMs);
            Logger.Debug("HiveExperiment: Abort - GetLastResult(true)");
            response = executionEngineFacade.GetLastSerializedResult(jobId, false, true);
            Logger.Debug("HiveExperiment: Abort - Server: " + response.StatusMessage + " success: " + response.Success);
            retryCount++;
            // loop while
            // 1. problem with communication with server
            // 2. job result not yet ready
          } while (
            (retryCount < maxSnapshotRetries) &&
            (!response.Success ||
              response.StatusMessage == ApplicationConstants.RESPONSE_JOB_RESULT_NOT_YET_HERE)
          );
        }
        if (response.Success) {
          LogMessage(jobId, "Snapshot polling successful for job " + jobId);
          UpdateSnapshot(response);
        } else {
          LogMessage(jobId, "Error: Polling of Snapshot failed for job " + jobId + ": " + response.StatusMessage);
        }
      });
      t.Start();
    }

    void JobItem_SnapshotRequestedStateChanged(object sender, EventArgs e) {
      JobItem jobItem = (JobItem)sender;
      if (jobItem.SnapshotRequestedState == SnapshotRequestedState.Requested) {
        RequestSnapshot(jobItem.JobDto.Id);
      }
    }
    #endregion

    #region Required Plugin Search
    /// <summary>
    /// Returns a list of plugins in which the type itself and all members
    /// of the type are declared. Objectgraph is searched recursively.
    /// </summary>
    private IEnumerable<IPluginDescription> GetDeclaringPlugins(Type type) {
      HashSet<Type> types = new HashSet<Type>();
      FindTypes(type, types, "HeuristicLab.");
      return GetDeclaringPlugins(types);
    }

    /// <summary>
    /// Returns the plugins (including dependencies) in which the given types are declared.
    /// </summary>
    private IEnumerable<IPluginDescription> GetDeclaringPlugins(IEnumerable<Type> types) {
      HashSet<IPluginDescription> plugins = new HashSet<IPluginDescription>();
      foreach (Type t in types) {
        FindDeclaringPlugins(ApplicationManager.Manager.GetDeclaringPlugin(t), plugins);
      }
      return plugins;
    }

    /// <summary>
    /// Finds the dependencies of the given plugin and adds it to the plugins hashset.
    /// Also searches the dependencies recursively.
    /// </summary>
    private void FindDeclaringPlugins(IPluginDescription plugin, HashSet<IPluginDescription> plugins) {
      if (!plugins.Contains(plugin)) {
        plugins.Add(plugin);
        foreach (IPluginDescription dependency in plugin.Dependencies) {
          FindDeclaringPlugins(dependency, plugins);
        }
      }
    }

    /// <summary>
    /// Recursively finds all types used in type which are in a namespace which starts with namespaceStart.
    /// Be aware that search is not performed on attributes.
    /// </summary>
    /// <param name="type">the type to be searched</param>
    /// <param name="types">found types will be stored there, needed in order to avoid duplicates</param>
    /// <param name="namespaceStart">only types from namespaces which start with this will be searched and added</param>
    private void FindTypes(Type type, HashSet<Type> types, string namespaceStart) {
      // BUGFIX: Type.Namespace is null for generic type parameters; guard against NRE
      if (!types.Contains(type) && type.Namespace != null && type.Namespace.StartsWith(namespaceStart)) {
        types.Add(type);

        // constructors
        foreach (ConstructorInfo info in type.GetConstructors()) {
          foreach (ParameterInfo paramInfo in info.GetParameters()) {
            FindTypes(paramInfo.ParameterType, types, namespaceStart);
          }
        }
        // interfaces
        foreach (Type t in type.GetInterfaces()) {
          FindTypes(t, types, namespaceStart);
        }
        // events
        foreach (EventInfo info in type.GetEvents()) {
          FindTypes(info.EventHandlerType, types, namespaceStart);
          FindTypes(info.DeclaringType, types, namespaceStart);
        }
        // properties
        foreach (PropertyInfo info in type.GetProperties()) {
          FindTypes(info.PropertyType, types, namespaceStart);
        }
        // fields
        foreach (FieldInfo info in type.GetFields()) {
          FindTypes(info.FieldType, types, namespaceStart);
        }
        // methods
        foreach (MethodInfo info in type.GetMethods()) {
          foreach (ParameterInfo paramInfo in info.GetParameters()) {
            FindTypes(paramInfo.ParameterType, types, namespaceStart);
          }
          FindTypes(info.ReturnType, types, namespaceStart);
        }
      }
    }
    #endregion

    #region Eventhandler
    public event EventHandler ExecutionTimeChanged;
    private void OnExecutionTimeChanged() {
      EventHandler handler = ExecutionTimeChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler ExecutionStateChanged;
    private void OnExecutionStateChanged() {
      LogMessage("ExecutionState changed to " + executionState.ToString());
      EventHandler handler = ExecutionStateChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    // NOTE(review): generic argument reconstructed — IExecutable typically declares
    // EventHandler<EventArgs<Exception>> ExceptionOccurred; confirm against the interface.
    public event EventHandler<EventArgs<Exception>> ExceptionOccurred;

    public event EventHandler Started;
    private void OnStarted() {
      LogMessage("Started");
      timer.Start();
      EventHandler handler = Started;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler Stopped;
    private void OnStopped() {
      timer.Stop();
      LogMessage("Stopped");
      EventHandler handler = Stopped;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler Paused;
    private void OnPaused() {
      timer.Stop();
      LogMessage("Paused");
      EventHandler handler = Paused;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler Prepared;
    protected virtual void OnPrepared() {
      LogMessage("Prepared");
      EventHandler handler = Prepared;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler ResourceIdsChanged;
    protected virtual void OnResourceIdsChanged() {
      EventHandler handler = ResourceIdsChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler ExperimentChanged;
    protected virtual void OnExperimentChanged() {
      LogMessage("Experiment changed");
      EventHandler handler = ExperimentChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler ServerUrlChanged;
    protected virtual void OnServerUrlChanged() {
      EventHandler handler = ServerUrlChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    public event EventHandler IsResultsPollingChanged;
    private void OnIsPollingResultsChanged() {
      if (this.IsPollingResults) {
        LogMessage("Results Polling Started");
        timer.Start();
      } else {
        LogMessage("Results Polling Stopped");
        timer.Stop();
      }
      EventHandler handler = IsResultsPollingChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    private void RegisterJobItemListEvents() {
      jobItems.CollectionReset += new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_CollectionReset);
      jobItems.ItemsAdded += new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsAdded);
      jobItems.ItemsRemoved += new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsRemoved);
      jobItems.ItemsReplaced += new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsReplaced);
      foreach (JobItem jobItem in jobItems) {
        RegisterJobItemEvents(jobItem);
      }
    }

    private void DeregisterJobItemListEvents() {
      jobItems.CollectionReset -= new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_CollectionReset);
      jobItems.ItemsAdded -= new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsAdded);
      jobItems.ItemsRemoved -= new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsRemoved);
      jobItems.ItemsReplaced -= new CollectionItemsChangedEventHandler<IndexedItem<JobItem>>(jobItems_ItemsReplaced);
      foreach (JobItem jobItem in jobItems) {
        DeregisterJobItemEvents(jobItem);
      }
    }

    void jobItems_ItemsReplaced(object sender, CollectionItemsChangedEventArgs<IndexedItem<JobItem>> e) {
      UpdateJobItemEvents(e);
    }

    private void UpdateJobItemEvents(CollectionItemsChangedEventArgs<IndexedItem<JobItem>> e) {
      if (e.OldItems != null) {
        foreach (var item in e.OldItems) {
          DeregisterJobItemEvents(item.Value);
        }
      }
      if (e.Items != null) {
        foreach (var item in e.Items) {
          RegisterJobItemEvents(item.Value);
        }
      }
    }

    private void RegisterJobItemEvents(JobItem jobItem) {
      jobItem.SnapshotRequestedStateChanged += new EventHandler(JobItem_SnapshotRequestedStateChanged);
      jobItem.JobStateChanged += new EventHandler(JobItem_JobStateChanged);
    }

    private void DeregisterJobItemEvents(JobItem jobItem) {
      jobItem.SnapshotRequestedStateChanged -= new EventHandler(JobItem_SnapshotRequestedStateChanged);
      jobItem.JobStateChanged -= new EventHandler(JobItem_JobStateChanged);
    }

    void jobItems_ItemsRemoved(object sender, CollectionItemsChangedEventArgs<IndexedItem<JobItem>> e) {
      UpdateJobItemEvents(e);
    }

    void jobItems_ItemsAdded(object sender, CollectionItemsChangedEventArgs<IndexedItem<JobItem>> e) {
      UpdateJobItemEvents(e);
    }

    void jobItems_CollectionReset(object sender, CollectionItemsChangedEventArgs<IndexedItem<JobItem>> e) {
      foreach (var item in e.OldItems) {
        item.Value.JobStateChanged -= new EventHandler(JobItem_JobStateChanged);
        item.Value.SnapshotRequestedStateChanged -= new EventHandler(JobItem_SnapshotRequestedStateChanged);
      }
    }
    #endregion

    #region Helper Functions
    /// <summary>
    /// Creates a facade to the Hive execution engine. Retries on connection
    /// failure until a connection is established or this experiment is stopped.
    /// May therefore return null when the experiment is stopped while retrying.
    /// </summary>
    private IExecutionEngineFacade GetExecutionEngineFacade() {
      IExecutionEngineFacade executionEngineFacade = null;
      do {
        try {
          executionEngineFacade = ServiceLocator.CreateExecutionEngineFacade(ServerUrl);
        }
        catch (EndpointNotFoundException exception) {
          LogMessage("Could not connect to Server: " + exception.Message + ". Will try again in " + (resultPollingIntervalMs / 1000) + " sec.");
          Thread.Sleep(resultPollingIntervalMs);
        }
      } while (executionEngineFacade == null && this.ExecutionState != Core.ExecutionState.Stopped);
      return executionEngineFacade;
    }

    private void AddRange(IDictionary<IOptimizer, IOptimizer> optimizers, IDictionary<IOptimizer, IOptimizer> childs) {
      foreach (KeyValuePair<IOptimizer, IOptimizer> kvp in childs) {
        optimizers.Add(kvp);
      }
    }
    #endregion

    #region Logging
    private void LogMessage(string message) {
      // HeuristicLab.Log is not Thread-Safe, so lock on every call
      lock (locker) {
        log.LogMessage(message);
      }
    }

    private void LogMessage(Guid jobId, string message) {
      GetJobItemById(jobId).LogMessage(message);
      LogMessage(message + " (jobId: " + jobId + ")");
    }
    #endregion
  }
}