#region License Information
/* HeuristicLab
* Copyright (C) 2002-2010 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Hive.Contracts.BusinessObjects;
using HeuristicLab.Hive.ExperimentManager.Jobs;
using HeuristicLab.Optimization;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure.Manager;
using HeuristicLab.PluginInfrastructure;
namespace HeuristicLab.Hive.ExperimentManager {
[Item("Hive Job", "Represents a hive job.")]
[StorableClass]
public class HiveJob : Item {
  // NOTE(review): the checked-in text had all generic type arguments stripped
  // (e.g. "ReadOnlyItemList", "CollectionItemsChangedEventHandler>"). They were
  // reconstructed from usage (childHiveJobs holds HiveJobs, experiment.Optimizers
  // raises IndexedItem<IOptimizer> events) - please verify against the repository.

  // Guards LogMessage; static so log writes of all HiveJob instances are serialized.
  private static object locker = new object();

  /// <summary>
  /// Icon representing the current state of the job; a plain event icon is shown
  /// while the job has not been uploaded yet (its id is still Guid.Empty).
  /// </summary>
  public override Image ItemImage {
    get {
      if (jobDto.Id == Guid.Empty) { // not yet uploaded
        return HeuristicLab.Common.Resources.VSImageLibrary.Event;
      } else {
        if (jobDto.State == JobState.Offline) return HeuristicLab.Common.Resources.VSImageLibrary.ExecutablePrepared;
        else if (jobDto.State == JobState.WaitForChildJobs) return HeuristicLab.Common.Resources.VSImageLibrary.ExecutablePrepared;
        else if (jobDto.State == JobState.Calculating) return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStarted;
        else if (jobDto.State == JobState.Aborted) return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStopped;
        else if (jobDto.State == JobState.Failed) return HeuristicLab.Common.Resources.VSImageLibrary.Error;
        else if (jobDto.State == JobState.Finished) return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStopped;
        else return HeuristicLab.Common.Resources.VSImageLibrary.Event;
      }
    }
  }

  [Storable]
  private JobDto jobDto;
  /// <summary>
  /// Hive-side metadata of this job (id, state, dates, execution time, ...).
  /// Raises JobDtoChanged, ToStringChanged and ItemImageChanged when replaced.
  /// </summary>
  public JobDto JobDto {
    get { return jobDto; }
    set {
      if (jobDto != value) {
        jobDto = value;
        OnJobDtoChanged();
        OnToStringChanged();
        OnItemImageChanged();
      }
    }
  }

  [Storable]
  private OptimizerJob job;
  /// <summary>
  /// The wrapped optimizer-job. Setting it re-registers all optimizer events and,
  /// if syncing is enabled, discards the current child hive-jobs.
  /// </summary>
  public OptimizerJob Job {
    get { return job; }
    private set {
      if (job != null && syncJobsWithOptimizers) {
        this.childHiveJobs.Clear();
      }
      if (job != value) {
        DeregisterOptimizerEvents();
        job = value;
        if (job != null && job.ExecutionState == ExecutionState.Stopped) {
          // a stopped optimizer is already final, so nothing remains to download
          IsFinishedOptimizerDownloaded = true;
        }
        RegisterOptimizerEvents();
        OnJobChanged();
      }
    }
  }

  [Storable]
  private HiveJobList childHiveJobs;
  /// <summary>
  /// Read-only view of the child hive-jobs (children of an experiment or batch run).
  /// </summary>
  public ReadOnlyItemList<HiveJob> ChildHiveJobs {
    get { return childHiveJobs.AsReadOnly(); }
  }

  [Storable]
  private bool isFinishedOptimizerDownloaded;
  /// <summary>
  /// True once the finished optimizer of this job has been downloaded from hive.
  /// </summary>
  public bool IsFinishedOptimizerDownloaded {
    get { return isFinishedOptimizerDownloaded; }
    set {
      if (isFinishedOptimizerDownloaded != value) {
        isFinishedOptimizerDownloaded = value;
        OnIsFinishedOptimizerDownloadedChanged();
      }
    }
  }

  // When true, childHiveJobs are kept in sync with the child-optimizers of Job.
  // Temporarily disabled while this class itself manipulates the optimizer tree.
  [Storable]
  private bool syncJobsWithOptimizers = true;

  public HiveJob() {
    this.JobDto = new JobDto() {
      State = JobState.Offline,
      DateCreated = DateTime.Now,
      CoresNeeded = 1,
      MemoryNeeded = 0
    };
    this.childHiveJobs = new HiveJobList();
    syncJobsWithOptimizers = true;
  }

  public HiveJob(JobDto jobDto)
    : this() {
    this.JobDto = jobDto;
  }

  public HiveJob(JobResult jobResult)
    : this() {
    UpdateFromJobResult(jobResult);
  }

  public HiveJob(OptimizerJob optimizerJob, bool autoCreateChildHiveJobs)
    : this() {
    this.syncJobsWithOptimizers = autoCreateChildHiveJobs;
    this.Job = optimizerJob;
    this.syncJobsWithOptimizers = true;
  }

  public HiveJob(IOptimizer optimizer)
    : this() {
    this.Job = new OptimizerJob(optimizer);
    this.IsFinishedOptimizerDownloaded = false;
  }

  public HiveJob(SerializedJob serializedJob, bool autoCreateChildHiveJobs)
    : this() {
    this.syncJobsWithOptimizers = autoCreateChildHiveJobs;
    this.JobDto = serializedJob.JobInfo;
    try {
      this.Job = SerializedJob.Deserialize(serializedJob.SerializedJobData);
    }
    catch {
      // deliberately best-effort: a job whose payload cannot be deserialized
      // (e.g. missing plugins) is still shown, just without its optimizer
      this.Job = null;
    }
    this.syncJobsWithOptimizers = true;
  }

  [StorableConstructor]
  protected HiveJob(bool deserializing) : base(deserializing) { }

  protected HiveJob(HiveJob original, Cloner cloner)
    : base(original, cloner) {
    // disable syncing while the cloned state is assembled; otherwise setting Job
    // would touch childHiveJobs (which is not initialized yet -> NRE) and would
    // recreate children, duplicating the explicitly cloned ones below
    this.syncJobsWithOptimizers = false;
    this.JobDto = cloner.Clone(original.jobDto);
    this.Job = cloner.Clone(original.job);
    this.childHiveJobs = cloner.Clone(original.childHiveJobs);
    this.isFinishedOptimizerDownloaded = original.isFinishedOptimizerDownloaded;
    this.syncJobsWithOptimizers = original.syncJobsWithOptimizers;
  }

  public override IDeepCloneable Clone(Cloner cloner) {
    return new HiveJob(this, cloner);
  }

  /// <summary>
  /// if this.Optimizer is an experiment
  ///   Uses the child-optimizers of this.HiveJob and creates HiveJob-childs
  /// if this.Optimizer is a batchrun
  ///   Creates a number of child-jobs according to repetitions
  /// </summary>
  private void UpdateChildHiveJobs() {
    if (Job != null && Job.Optimizer != null && syncJobsWithOptimizers) {
      if (Job.Optimizer is Optimization.Experiment) {
        Optimization.Experiment experiment = (Optimization.Experiment)Job.Optimizer;
        foreach (IOptimizer childOpt in experiment.Optimizers) {
          this.childHiveJobs.Add(new HiveJob(childOpt));
        }
      } else if (Job.Optimizer is Optimization.BatchRun) {
        Optimization.BatchRun batchRun = Job.OptimizerAsBatchRun;
        if (batchRun.Optimizer != null) {
          // grow or shrink the child list until it matches the repetition count
          while (this.childHiveJobs.Count < batchRun.Repetitions) {
            this.childHiveJobs.Add(new HiveJob(batchRun.Optimizer));
          }
          while (this.childHiveJobs.Count > batchRun.Repetitions) {
            this.childHiveJobs.Remove(this.childHiveJobs.Last());
          }
        }
      }
    }
  }

  private void RegisterOptimizerEvents() {
    if (Job != null && Job.Optimizer != null) {
      if (Job.Optimizer is Optimization.Experiment) {
        Optimization.Experiment experiment = Job.OptimizerAsExperiment;
        experiment.Optimizers.ItemsAdded += new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsAdded);
        experiment.Optimizers.ItemsReplaced += new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsReplaced);
        experiment.Optimizers.ItemsRemoved += new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsRemoved);
        experiment.Optimizers.CollectionReset += new CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_CollectionReset);
      } else if (Job.Optimizer is Optimization.BatchRun) {
        Optimization.BatchRun batchRun = Job.OptimizerAsBatchRun;
        batchRun.RepetitionsChanged += new EventHandler(batchRun_RepetitionsChanged);
        batchRun.OptimizerChanged += new EventHandler(batchRun_OptimizerChanged);
      }
      Job.ComputeInParallelChanged += new EventHandler(Job_ComputeInParallelChanged);
      Job.ToStringChanged += new EventHandler(Job_ToStringChanged);
    }
  }

  // BUGFIX: the outer condition used to be "Job.Optimizer is Experiment", so the
  // BatchRun handlers and the ComputeInParallelChanged/ToStringChanged handlers
  // were never unsubscribed (event leak). Now mirrors RegisterOptimizerEvents.
  private void DeregisterOptimizerEvents() {
    if (Job != null && Job.Optimizer != null) {
      if (Job.Optimizer is Optimization.Experiment) {
        Optimization.Experiment experiment = Job.OptimizerAsExperiment;
        experiment.Optimizers.ItemsAdded -= new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsAdded);
        experiment.Optimizers.ItemsReplaced -= new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsReplaced);
        experiment.Optimizers.ItemsRemoved -= new Collections.CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_ItemsRemoved);
        experiment.Optimizers.CollectionReset -= new CollectionItemsChangedEventHandler<IndexedItem<IOptimizer>>(Optimizers_CollectionReset);
      } else if (Job.Optimizer is Optimization.BatchRun) {
        Optimization.BatchRun batchRun = Job.OptimizerAsBatchRun;
        batchRun.RepetitionsChanged -= new EventHandler(batchRun_RepetitionsChanged);
        batchRun.OptimizerChanged -= new EventHandler(batchRun_OptimizerChanged);
      }
      Job.ComputeInParallelChanged -= new EventHandler(Job_ComputeInParallelChanged);
      Job.ToStringChanged -= new EventHandler(Job_ToStringChanged);
    }
  }

  void batchRun_OptimizerChanged(object sender, EventArgs e) {
    if (syncJobsWithOptimizers) {
      // a new optimizer invalidates all existing repetitions
      this.childHiveJobs.Clear();
      UpdateChildHiveJobs();
    }
  }

  void batchRun_RepetitionsChanged(object sender, EventArgs e) {
    if (syncJobsWithOptimizers) {
      UpdateChildHiveJobs();
    }
  }

  void Job_ToStringChanged(object sender, EventArgs e) {
    this.OnToStringChanged();
  }

  private void Optimizers_ItemsAdded(object sender, CollectionItemsChangedEventArgs<IndexedItem<IOptimizer>> e) {
    if (syncJobsWithOptimizers && this.Job.ComputeInParallel) {
      foreach (var item in e.Items) {
        // "Placeholder" entries are dummy optimizers inserted by
        // UpdateOptimizerInExperiment and must not get their own hive job
        if (GetChildByOptimizer(item.Value) == null && item.Value.Name != "Placeholder") {
          this.childHiveJobs.Add(new HiveJob(item.Value));
        }
      }
    }
  }

  private void Optimizers_ItemsReplaced(object sender, CollectionItemsChangedEventArgs<IndexedItem<IOptimizer>> e) {
    if (syncJobsWithOptimizers && this.Job.ComputeInParallel) {
      foreach (var item in e.OldItems) {
        this.childHiveJobs.Remove(this.GetChildByOptimizer(item.Value));
      }
      foreach (var item in e.Items) {
        if (GetChildByOptimizer(item.Value) == null && item.Value.Name != "Placeholder") {
          this.childHiveJobs.Add(new HiveJob(item.Value));
        }
      }
    }
  }

  private void Optimizers_ItemsRemoved(object sender, CollectionItemsChangedEventArgs<IndexedItem<IOptimizer>> e) {
    if (syncJobsWithOptimizers && this.Job.ComputeInParallel) {
      foreach (var item in e.Items) {
        this.childHiveJobs.Remove(this.GetChildByOptimizer(item.Value));
      }
    }
  }

  void Optimizers_CollectionReset(object sender, CollectionItemsChangedEventArgs<IndexedItem<IOptimizer>> e) {
    if (syncJobsWithOptimizers && this.Job.ComputeInParallel) {
      foreach (var item in e.Items) {
        this.childHiveJobs.Remove(this.GetChildByOptimizer(item.Value));
      }
    }
  }

  void Job_ComputeInParallelChanged(object sender, EventArgs e) {
    if (Job != null && syncJobsWithOptimizers) {
      if (Job.ComputeInParallel) {
        // child-hive jobs are not yet created, so create them according to the child-optimizers
        this.UpdateChildHiveJobs();
      } else {
        // child-hive jobs need to be deleted
        this.childHiveJobs.Clear();
      }
    }
  }

  /// <summary>
  /// Adds a child hive-job and inserts its optimizer into this job's
  /// experiment/batch run (syncing is suspended while doing so).
  /// </summary>
  public void AddChildHiveJob(HiveJob hiveJob) {
    this.childHiveJobs.Add(hiveJob);
    syncJobsWithOptimizers = false;
    if (this.Job != null && hiveJob.Job != null) {
      if (this.Job.Optimizer is Optimization.Experiment) {
        if (!this.Job.OptimizerAsExperiment.Optimizers.Contains(hiveJob.Job.Optimizer)) {
          UpdateOptimizerInExperiment(this.Job.OptimizerAsExperiment, hiveJob.Job);
        }
      } else if (this.Job.Optimizer is Optimization.BatchRun) {
        UpdateOptimizerInBatchRun(this.Job.OptimizerAsBatchRun, hiveJob.Job);
      }
    }
    syncJobsWithOptimizers = true;
  }

  /// <summary>
  /// if this.Optimizer is Experiment
  ///   replace the child-optimizer in the experiment
  /// if this.Optimizer is BatchRun
  ///   add the runs from the optimizerJob to the batchrun and replace the Optimizer
  /// </summary>
  public void UpdateChildOptimizer(OptimizerJob optimizerJob, Guid childJobId) {
    syncJobsWithOptimizers = false; // don't sync with optimizers during this method
    bool childIsFinishedOptimizerDownloaded = false;
    if (this.Job != null && this.Job.Optimizer != null) {
      if (this.Job.Optimizer is Optimization.Experiment) {
        UpdateOptimizerInExperiment(this.Job.OptimizerAsExperiment, optimizerJob);
        childIsFinishedOptimizerDownloaded = true;
      } else if (this.Job.Optimizer is Optimization.BatchRun) {
        UpdateOptimizerInBatchRun(this.Job.OptimizerAsBatchRun, optimizerJob);
        // a batch run child counts as fully downloaded only when all repetitions arrived
        if (this.Job.OptimizerAsBatchRun.Repetitions == this.Job.Optimizer.Runs.Count) {
          childIsFinishedOptimizerDownloaded = true;
        }
      } else {
        childIsFinishedOptimizerDownloaded = optimizerJob.Optimizer.ExecutionState == ExecutionState.Stopped || optimizerJob.Optimizer.ExecutionState == ExecutionState.Prepared;
      }
    }
    HiveJob child = this.ChildHiveJobs.Single(j => j.JobDto.Id == childJobId);
    if (!optimizerJob.ComputeInParallel) {
      child.syncJobsWithOptimizers = false;
      child.Job = optimizerJob;
      child.syncJobsWithOptimizers = true;
    }
    if (childIsFinishedOptimizerDownloaded) {
      child.IsFinishedOptimizerDownloaded = true;
    }
    syncJobsWithOptimizers = true;
  }

  /// <summary>
  /// Adds the runs from the optimizerJob to the batchrun and replaces the Optimizer
  /// Sideeffect: the optimizerJob.Optimizer will be prepared (scopes are deleted and executionstate will be reset)
  /// </summary>
  private void UpdateOptimizerInBatchRun(BatchRun batchRun, OptimizerJob optimizerJob) {
    if (batchRun.Optimizer == null) {
      batchRun.Optimizer = optimizerJob.Optimizer; // only set the first optimizer as algorithm. if every time the Algorithm would be set, the runs would be cleared each time
    }
    foreach (IRun run in optimizerJob.Optimizer.Runs) {
      if (!batchRun.Runs.Contains(run))
        batchRun.Runs.Add(run);
    }
  }

  /// <summary>
  /// replace the child-optimizer in the experiment
  /// Sideeffect: the optimizerJob.Optimizer will be prepared (scopes are deleted and executionstate will be reset)
  /// </summary>
  private void UpdateOptimizerInExperiment(Optimization.Experiment experiment, OptimizerJob optimizerJob) {
    if (optimizerJob.IndexInParentOptimizerList < 0)
      throw new IndexOutOfRangeException("IndexInParentOptimizerList must be equal or greater than zero! The Job is invalid and the optimizer-tree cannot be reassembled.");
    while (experiment.Optimizers.Count < optimizerJob.IndexInParentOptimizerList) {
      experiment.Optimizers.Add(new UserDefinedAlgorithm("Placeholder")); // add dummy-entries to Optimizers so that its possible to insert the optimizerJob at the correct position
    }
    if (experiment.Optimizers.Count < optimizerJob.IndexInParentOptimizerList + 1) {
      experiment.Optimizers.Add(optimizerJob.Optimizer);
    } else {
      // if ComputeInParallel==true, don't replace the optimizer (except it is still a Placeholder)
      // this is because Jobs with ComputeInParallel get submitted to hive with their child-optimizers deleted
      if (!optimizerJob.ComputeInParallel || experiment.Optimizers[optimizerJob.IndexInParentOptimizerList].Name == "Placeholder") {
        experiment.Optimizers[optimizerJob.IndexInParentOptimizerList] = optimizerJob.Optimizer;
      }
    }
  }

  /// <summary>
  /// Sets the IndexInParentOptimizerList property of the OptimizerJob
  /// according to the position in the OptimizerList of the parentHiveJob.Job
  /// Recursively updates all the child-jobs as well
  /// </summary>
  internal void SetIndexInParentOptimizerList(HiveJob parentHiveJob) {
    if (parentHiveJob != null && parentHiveJob.Job != null && parentHiveJob.Job.Optimizer != null) {
      if (parentHiveJob.Job.Optimizer is Optimization.Experiment) {
        this.Job.IndexInParentOptimizerList = parentHiveJob.Job.OptimizerAsExperiment.Optimizers.IndexOf(this.Job.Optimizer);
      } else if (parentHiveJob.Job.Optimizer is Optimization.BatchRun) {
        this.Job.IndexInParentOptimizerList = 0;
      } else {
        throw new NotSupportedException("Only Experiment and BatchRuns are supported");
      }
    }
    foreach (HiveJob child in childHiveJobs) {
      child.SetIndexInParentOptimizerList(this);
    }
  }

  public override string ToString() {
    if (job != null) {
      return job.ToString();
    } else {
      return base.ToString();
    }
  }

  /// <summary>
  /// Copies the state reported by the hive service into the local JobDto
  /// and raises the corresponding change events.
  /// </summary>
  public void UpdateFromJobResult(JobResult jobResult) {
    if (jobResult != null) {
      jobDto.Id = jobResult.Id;
      jobDto.DateCreated = jobResult.DateCreated;
      jobDto.DateCalculated = jobResult.DateCalculated;
      jobDto.DateFinished = jobResult.DateFinished;
      jobDto.Exception = jobResult.Exception;
      jobDto.ExecutionTime = jobResult.ExecutionTime;
      jobDto.State = jobResult.State;
      // what about parentJob
      OnJobStateChanged();
      OnToStringChanged();
      OnItemImageChanged();
    }
  }

  /// <summary>
  /// Creates a SerializedJob object containing the JobDto and the IJob-Object as byte[]
  /// </summary>
  /// <param name="withoutChildOptimizers">
  /// if true the Child-Optimizers will not be serialized (if the job contains an Experiment)
  /// </param>
  public SerializedJob GetAsSerializedJob(bool withoutChildOptimizers) {
    if (this.job == null || this.job.Optimizer == null)
      return null;
    byte[] jobByteArray;
    if (withoutChildOptimizers && this.Job.Optimizer is Optimization.Experiment) {
      OptimizerJob clonedJob = (OptimizerJob)this.Job.Clone(); // use a cloned job, so that the childHiveJob don't get confused
      clonedJob.OptimizerAsExperiment.Optimizers.Clear();
      jobByteArray = SerializedJob.Serialize(clonedJob);
    } else if (withoutChildOptimizers && this.Job.Optimizer is Optimization.BatchRun) {
      OptimizerJob clonedJob = (OptimizerJob)this.Job.Clone();
      clonedJob.OptimizerAsBatchRun.Optimizer = null;
      jobByteArray = SerializedJob.Serialize(clonedJob);
    } else if (this.Job.Optimizer is IAlgorithm) {
      ((IAlgorithm)this.Job.Optimizer).StoreAlgorithmInEachRun = false; // avoid storing the algorithm in runs to reduce size
      jobByteArray = SerializedJob.Serialize(this.Job);
    } else {
      jobByteArray = SerializedJob.Serialize(this.Job);
    }
    UpdateRequiredPlugins();
    SerializedJob serializedJob = new SerializedJob() {
      JobInfo = jobDto,
      SerializedJobData = jobByteArray
    };
    return serializedJob;
  }

  /// <summary>
  /// find out which plugins are needed for the given object
  /// </summary>
  private void UpdateRequiredPlugins() {
    if (job != null) {
      // all currently loaded plugins are declared as required; exact per-job
      // dependency discovery is not performed here
      this.JobDto.PluginsNeeded = ApplicationManager.Manager.Plugins.Select(x => new HivePluginInfoDto { Name = x.Name, Version = x.Version }).ToList();
    }
  }

  #region Events
  public event EventHandler JobDtoChanged;
  private void OnJobDtoChanged() {
    LogMessage("JobDtoChanged");
    EventHandler handler = JobDtoChanged;
    if (handler != null) handler(this, EventArgs.Empty);
  }

  public event EventHandler JobStateChanged;
  private void OnJobStateChanged() {
    LogMessage("JobStateChanged (State: " + this.JobDto.State + ", ExecutionTime: " + this.JobDto.ExecutionTime.ToString() + ")");
    EventHandler handler = JobStateChanged;
    if (handler != null) handler(this, EventArgs.Empty);
  }

  public event EventHandler JobChanged;
  private void OnJobChanged() {
    // keep the child-jobs consistent with the new optimizer before notifying listeners
    Job_ComputeInParallelChanged(this, EventArgs.Empty);
    var handler = JobChanged;
    if (handler != null) handler(this, EventArgs.Empty);
  }

  public event EventHandler IsFinishedOptimizerDownloadedChanged;
  private void OnIsFinishedOptimizerDownloadedChanged() {
    var handler = IsFinishedOptimizerDownloadedChanged;
    if (handler != null) handler(this, EventArgs.Empty);
  }
  #endregion

  /// <summary>
  /// Writes a message into the log of the wrapped job (no-op while Job is null).
  /// </summary>
  public void LogMessage(string message) {
    lock (locker) {
      if (job != null) {
        job.Log.LogMessage(message);
      }
    }
  }

  /// <summary>
  /// Returns a list of HiveJobs including this and all its child-jobs recursively
  /// </summary>
  public IEnumerable<HiveJob> GetAllHiveJobs() {
    List<HiveJob> jobs = new List<HiveJob>();
    jobs.Add(this);
    foreach (HiveJob child in this.ChildHiveJobs) {
      jobs.AddRange(child.GetAllHiveJobs());
    }
    return jobs;
  }

  /// <summary>
  /// Returns the HiveJob that has a direct child with the given jobId, or null.
  /// Uses Any instead of SingleOrDefault because several not-yet-uploaded children
  /// may share Guid.Empty (see RemoveByJobId), which used to throw.
  /// </summary>
  public HiveJob GetParentByJobId(Guid jobId) {
    if (this.ChildHiveJobs.Any(j => j.jobDto.Id == jobId))
      return this;
    foreach (HiveJob child in this.childHiveJobs) {
      HiveJob result = child.GetParentByJobId(jobId);
      if (result != null)
        return result;
    }
    return null;
  }

  public HiveJob GetChildByOptimizerJob(OptimizerJob optimizerJob) {
    foreach (var child in ChildHiveJobs) {
      if (child.Job == optimizerJob)
        return child;
    }
    return null;
  }

  public HiveJob GetChildByOptimizer(IOptimizer optimizer) {
    foreach (var child in ChildHiveJobs) {
      // child.Job may be null if its payload could not be deserialized
      if (child.Job != null && child.Job.Optimizer == optimizer)
        return child;
    }
    return null;
  }

  /// <summary>
  /// Searches for an HiveJob object with the correct jobId recursively
  /// </summary>
  public HiveJob GetHiveJobByJobId(Guid jobId) {
    if (this.JobDto.Id == jobId) {
      return this;
    } else {
      foreach (HiveJob child in this.ChildHiveJobs) {
        HiveJob result = child.GetHiveJobByJobId(jobId);
        if (result != null)
          return result;
      }
    }
    return null;
  }

  public void RemoveByJobId(Guid jobId) {
    IEnumerable<HiveJob> jobs = ChildHiveJobs.Where(j => j.JobDto.Id == jobId).ToList(); // if Guid.Empty needs to be removed, there could be more than one with this jobId
    foreach (HiveJob j in jobs) {
      this.childHiveJobs.Remove(j);
    }
    foreach (HiveJob child in ChildHiveJobs) {
      child.RemoveByJobId(jobId);
    }
  }
}
}