#region License Information
/* HeuristicLab
* Copyright (C) 2002-2010 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using HeuristicLab.Clients.Hive.Jobs;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Optimization;
using HeuristicLab.PluginInfrastructure;
namespace HeuristicLab.Clients.Hive {
[Item("Hive Job", "Represents a hive job.")]
public class HiveJob : NamedItem, IItemTree {
// Static gate guarding log writes in LogMessage; shared by ALL HiveJob instances.
private static object locker = new object();
/// <summary>
/// Icon reflecting the job's lifecycle: a generic event icon before upload
/// (no id assigned yet), otherwise an image derived from the current JobState.
/// </summary>
public override Image ItemImage {
  get {
    // a job without an id has not been uploaded to the hive yet
    if (job.Id == Guid.Empty) {
      return HeuristicLab.Common.Resources.VSImageLibrary.Event;
    }
    switch (job.State) {
      case JobState.Waiting:
        return HeuristicLab.Common.Resources.VSImageLibrary.ExecutablePrepared;
      case JobState.Calculating:
        return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStarted;
      case JobState.Transferring:
        return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStarted;
      case JobState.Aborted:
        return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStopped;
      case JobState.Failed:
        return HeuristicLab.Common.Resources.VSImageLibrary.Error;
      case JobState.Finished:
        return HeuristicLab.Common.Resources.VSImageLibrary.ExecutableStopped;
      default:
        return HeuristicLab.Common.Resources.VSImageLibrary.Event;
    }
  }
}
// Backing field for Job.
private Job job;
/// <summary>
/// The hive-side job descriptor; replacing it raises change notifications
/// (JobChanged, ToStringChanged, ItemImageChanged).
/// </summary>
public Job Job {
  get { return job; }
  set {
    if (job == value) return; // no-op when unchanged
    job = value;
    OnJobChanged();
    OnToStringChanged();
    OnItemImageChanged();
  }
}
// Backing field for OptimizerJob.
private OptimizerJob optimizerJob;
/// <summary>
/// The serializable payload wrapping the actual optimizer. Assigning a new value
/// clears the child jobs (when syncing is on), rewires the optimizer events and
/// raises OptimizerJobChanged. May be null if deserialization of the job data failed.
/// </summary>
public OptimizerJob OptimizerJob {
  get { return optimizerJob; }
  private set {
    if (optimizerJob != null && syncJobsWithOptimizers) {
      this.childHiveJobs.Clear();
    }
    if (optimizerJob != value) {
      DergisterOptimizerEvents();
      optimizerJob = value;
      // null guard: the HiveJob(Job, JobData, bool) constructor assigns null when
      // PersistenceUtil.Deserialize throws; dereferencing here would crash
      if (optimizerJob != null && optimizerJob.ExecutionState == ExecutionState.Stopped) {
        IsFinishedOptimizerDownloaded = true;
      }
      RegisterOptimizerEvents();
      OnOptimizerJobChanged();
    }
  }
}
// Backing list of child jobs (e.g. sub-optimizers of an experiment or batchrun repetitions).
private ItemList childHiveJobs;
// Read-only view of the children; mutate through AddChildHiveJob / RemoveByJobId.
public ReadOnlyItemList ChildHiveJobs {
  get { return childHiveJobs.AsReadOnly(); }
}
// Backing field for IsFinishedOptimizerDownloaded.
private bool isFinishedOptimizerDownloaded;
/// <summary>
/// True once the finished optimizer result has been downloaded for this job;
/// raises IsFinishedOptimizerDownloadedChanged on transition.
/// </summary>
public bool IsFinishedOptimizerDownloaded {
  get { return isFinishedOptimizerDownloaded; }
  set {
    if (isFinishedOptimizerDownloaded == value) return;
    isFinishedOptimizerDownloaded = value;
    OnIsFinishedOptimizerDownloadedChanged();
  }
}
// When true, edits to the optimizer tree (experiment/batchrun) are mirrored into childHiveJobs;
// temporarily set to false while this class manipulates the optimizers itself.
private bool syncJobsWithOptimizers = true;
/// <summary>
/// Creates an empty hive job in state Offline with default resource
/// requirements (1 core, no memory requirement).
/// </summary>
public HiveJob() {
  this.Job = new Job() {
    CoresNeeded = 1,
    MemoryNeeded = 0
  };
  job.SetState(JobState.Offline);
  this.childHiveJobs = new ItemList();
  syncJobsWithOptimizers = true;
}
/// <summary>
/// Wraps an existing OptimizerJob. autoCreateChildHiveJobs controls whether
/// child HiveJobs are created from the optimizer's children while the
/// OptimizerJob property is assigned; syncing is re-enabled afterwards.
/// </summary>
public HiveJob(OptimizerJob optimizerJob, bool autoCreateChildHiveJobs)
  : this() {
  this.syncJobsWithOptimizers = autoCreateChildHiveJobs;
  this.OptimizerJob = optimizerJob;
  this.syncJobsWithOptimizers = true;
}
/// <summary>
/// Wraps a bare optimizer in a fresh OptimizerJob.
/// </summary>
public HiveJob(IOptimizer optimizer)
  : this() {
  this.OptimizerJob = new OptimizerJob(optimizer);
}
/// <summary>
/// Reconstructs a HiveJob from a downloaded Job and its serialized JobData.
/// If the payload cannot be deserialized (e.g. missing plugins), OptimizerJob
/// stays null but the Job metadata is kept (best-effort, deliberate swallow).
/// </summary>
public HiveJob(Job job, JobData jobData, bool autoCreateChildHiveJobs) {
  this.syncJobsWithOptimizers = autoCreateChildHiveJobs;
  this.Job = job;
  // initialize the child list BEFORE assigning OptimizerJob: the setter's event chain
  // (OnOptimizerJobChanged -> UpdateChildHiveJobs) accesses childHiveJobs and would
  // otherwise hit a null reference when syncing is enabled
  this.childHiveJobs = new ItemList();
  try {
    this.OptimizerJob = PersistenceUtil.Deserialize(jobData.Data);
  }
  catch {
    this.OptimizerJob = null;
  }
  this.syncJobsWithOptimizers = true;
}
/// <summary>
/// Cloning constructor. Initializes childHiveJobs explicitly because this
/// constructor does not chain to this(), and the OptimizerJob setter's event
/// chain would otherwise dereference a null childHiveJobs list.
/// </summary>
protected HiveJob(HiveJob original, Cloner cloner)
  : base(original, cloner) {
  this.childHiveJobs = new ItemList();
  this.Job = cloner.Clone(original.job);
  this.OptimizerJob = cloner.Clone(original.OptimizerJob);
}
// Deep-clones this HiveJob via the protected cloning constructor (HeuristicLab cloning pattern).
public override IDeepCloneable Clone(Cloner cloner) {
  return new HiveJob(this, cloner);
}
/// <summary>
/// if this.Optimizer is an experiment
///    Uses the child-optimizers of this.HiveJob and creates HiveJob-childs
/// if this.Optimizer is a batchrun
///    Creates a number of child-jobs according to repetitions
/// </summary>
private void UpdateChildHiveJobs() {
  // OptimizerJob may be null when deserialization of the job data failed; nothing to sync then
  if (Job != null && OptimizerJob != null && syncJobsWithOptimizers) {
    if (OptimizerJob.Optimizer is Optimization.Experiment) {
      Optimization.Experiment experiment = (Optimization.Experiment)OptimizerJob.Optimizer;
      // one child job per sub-optimizer of the experiment
      foreach (IOptimizer childOpt in experiment.Optimizers) {
        this.childHiveJobs.Add(new HiveJob(childOpt));
      }
    } else if (OptimizerJob.Optimizer is Optimization.BatchRun) {
      Optimization.BatchRun batchRun = OptimizerJob.OptimizerAsBatchRun;
      if (batchRun.Optimizer != null) {
        // grow or shrink the child list until it matches the repetition count
        while (this.childHiveJobs.Count < batchRun.Repetitions) {
          this.childHiveJobs.Add(new HiveJob(batchRun.Optimizer));
        }
        while (this.childHiveJobs.Count > batchRun.Repetitions) {
          this.childHiveJobs.Remove(this.childHiveJobs.Last());
        }
      }
    }
  }
}
// Subscribes to the wrapped optimizer's change events so that childHiveJobs can be
// kept in sync: collection events for experiments, repetition/optimizer events for
// batchruns, plus ComputeInParallel/ToString changes of the OptimizerJob itself.
// Counterpart of DergisterOptimizerEvents.
private void RegisterOptimizerEvents() {
  if (OptimizerJob != null) {
    if (OptimizerJob.Optimizer is Optimization.Experiment) {
      Optimization.Experiment experiment = OptimizerJob.OptimizerAsExperiment;
      experiment.Optimizers.ItemsAdded += new CollectionItemsChangedEventHandler>(Optimizers_ItemsAdded);
      experiment.Optimizers.ItemsReplaced += new CollectionItemsChangedEventHandler>(Optimizers_ItemsReplaced);
      experiment.Optimizers.ItemsRemoved += new CollectionItemsChangedEventHandler>(Optimizers_ItemsRemoved);
      experiment.Optimizers.CollectionReset += new CollectionItemsChangedEventHandler>(Optimizers_CollectionReset);
    } else if (OptimizerJob.Optimizer is Optimization.BatchRun) {
      Optimization.BatchRun batchRun = OptimizerJob.OptimizerAsBatchRun;
      batchRun.RepetitionsChanged += new EventHandler(batchRun_RepetitionsChanged);
      batchRun.OptimizerChanged += new EventHandler(batchRun_OptimizerChanged);
    }
    OptimizerJob.ComputeInParallelChanged += new EventHandler(OptimizerJob_ComputeInParallelChanged);
    OptimizerJob.ToStringChanged += new EventHandler(OptimizerJob_ToStringChanged);
  }
}
// Unsubscribes all handlers wired up by RegisterOptimizerEvents (must mirror it exactly
// to avoid handler leaks). NOTE(review): method name is a typo of "Deregister"; kept
// because it is called from the OptimizerJob setter.
private void DergisterOptimizerEvents() {
  if (OptimizerJob != null) {
    if (OptimizerJob.Optimizer is Optimization.Experiment) {
      Optimization.Experiment experiment = OptimizerJob.OptimizerAsExperiment;
      experiment.Optimizers.ItemsAdded -= new CollectionItemsChangedEventHandler>(Optimizers_ItemsAdded);
      experiment.Optimizers.ItemsReplaced -= new CollectionItemsChangedEventHandler>(Optimizers_ItemsReplaced);
      experiment.Optimizers.ItemsRemoved -= new CollectionItemsChangedEventHandler>(Optimizers_ItemsRemoved);
      experiment.Optimizers.CollectionReset -= new CollectionItemsChangedEventHandler>(Optimizers_CollectionReset);
    } else if (OptimizerJob.Optimizer is Optimization.BatchRun) {
      Optimization.BatchRun batchRun = OptimizerJob.OptimizerAsBatchRun;
      batchRun.RepetitionsChanged -= new EventHandler(batchRun_RepetitionsChanged);
      batchRun.OptimizerChanged -= new EventHandler(batchRun_OptimizerChanged);
    }
    OptimizerJob.ComputeInParallelChanged -= new EventHandler(OptimizerJob_ComputeInParallelChanged);
    OptimizerJob.ToStringChanged -= new EventHandler(OptimizerJob_ToStringChanged);
  }
}
// The batchrun received a (new) optimizer: rebuild all child jobs from scratch.
private void batchRun_OptimizerChanged(object sender, EventArgs e) {
  if (!syncJobsWithOptimizers) return;
  this.childHiveJobs.Clear();
  UpdateChildHiveJobs();
}
// The batchrun's repetition count changed: grow/shrink the child-job list accordingly.
private void batchRun_RepetitionsChanged(object sender, EventArgs e) {
  if (!syncJobsWithOptimizers) return;
  UpdateChildHiveJobs();
}
// Propagates display-name changes of the wrapped OptimizerJob to this item.
private void OptimizerJob_ToStringChanged(object sender, EventArgs e) {
  this.OnToStringChanged();
}
// A sub-optimizer was added to the experiment: create a child job for it,
// unless it is already tracked or is a "Placeholder" dummy entry.
private void Optimizers_ItemsAdded(object sender, CollectionItemsChangedEventArgs> e) {
  if (!syncJobsWithOptimizers || !this.OptimizerJob.ComputeInParallel) return;
  foreach (var addedItem in e.Items) {
    bool alreadyTracked = GetChildByOptimizer(addedItem.Value) != null;
    if (!alreadyTracked && addedItem.Value.Name != "Placeholder") {
      this.childHiveJobs.Add(new HiveJob(addedItem.Value));
    }
  }
}
// Sub-optimizers were swapped: drop the jobs of the replaced ones, then add
// jobs for the new ones (skipping known optimizers and "Placeholder" dummies).
private void Optimizers_ItemsReplaced(object sender, CollectionItemsChangedEventArgs> e) {
  if (!syncJobsWithOptimizers || !this.OptimizerJob.ComputeInParallel) return;
  foreach (var replaced in e.OldItems) {
    this.childHiveJobs.Remove(this.GetChildByOptimizer(replaced.Value));
  }
  foreach (var added in e.Items) {
    bool alreadyTracked = GetChildByOptimizer(added.Value) != null;
    if (!alreadyTracked && added.Value.Name != "Placeholder") {
      this.childHiveJobs.Add(new HiveJob(added.Value));
    }
  }
}
// Sub-optimizers were removed from the experiment: drop their child jobs.
private void Optimizers_ItemsRemoved(object sender, CollectionItemsChangedEventArgs> e) {
  if (!syncJobsWithOptimizers || !this.OptimizerJob.ComputeInParallel) return;
  foreach (var removed in e.Items) {
    this.childHiveJobs.Remove(this.GetChildByOptimizer(removed.Value));
  }
}
// The experiment's optimizer collection was reset: drop the child jobs of the affected items.
private void Optimizers_CollectionReset(object sender, CollectionItemsChangedEventArgs> e) {
  if (!syncJobsWithOptimizers || !this.OptimizerJob.ComputeInParallel) return;
  foreach (var affected in e.Items) {
    this.childHiveJobs.Remove(this.GetChildByOptimizer(affected.Value));
  }
}
// ComputeInParallel toggled: parallel jobs carry one child job per sub-optimizer,
// sequential jobs carry none.
private void OptimizerJob_ComputeInParallelChanged(object sender, EventArgs e) {
  if (OptimizerJob != null && syncJobsWithOptimizers) {
    if (OptimizerJob.ComputeInParallel) {
      // child-hive jobs are not yet created, so create them according to the child-optimizers
      this.UpdateChildHiveJobs();
    } else {
      // child-hive jobs need to be deleted
      this.childHiveJobs.Clear();
    }
  }
}
/// <summary>
/// Adds a child job and makes sure the child's optimizer is also contained in this
/// job's experiment/batchrun. Syncing is suspended while this method itself mutates
/// the optimizer tree, so the event handlers do not create duplicate child jobs.
/// </summary>
public void AddChildHiveJob(HiveJob hiveJob) {
  this.childHiveJobs.Add(hiveJob);
  syncJobsWithOptimizers = false;
  if (this.OptimizerJob != null && hiveJob.OptimizerJob != null) {
    if (this.OptimizerJob.Optimizer is Optimization.Experiment) {
      if (!this.OptimizerJob.OptimizerAsExperiment.Optimizers.Contains(hiveJob.OptimizerJob.Optimizer)) {
        UpdateOptimizerInExperiment(this.OptimizerJob.OptimizerAsExperiment, hiveJob.OptimizerJob);
      }
    } else if (this.OptimizerJob.Optimizer is Optimization.BatchRun) {
      UpdateOptimizerInBatchRun(this.OptimizerJob.OptimizerAsBatchRun, hiveJob.OptimizerJob);
    }
  }
  syncJobsWithOptimizers = true;
}
/// <summary>
/// if this.Optimizer is Experiment
///    replace the child-optimizer in the experiment
/// if this.Optimizer is BatchRun
///    add the runs from the optimizerJob to the batchrun and replace the Optimizer
/// </summary>
public void UpdateChildOptimizer(OptimizerJob optimizerJob, Guid childJobId) {
  syncJobsWithOptimizers = false; // don't sync with optimizers during this method
  bool childIsFinishedOptimizerDownloaded = false;
  if (this.OptimizerJob != null && this.OptimizerJob.Optimizer != null) {
    if (this.OptimizerJob.Optimizer is Optimization.Experiment) {
      UpdateOptimizerInExperiment(this.OptimizerJob.OptimizerAsExperiment, optimizerJob);
      childIsFinishedOptimizerDownloaded = true;
    } else if (this.OptimizerJob.Optimizer is Optimization.BatchRun) {
      UpdateOptimizerInBatchRun(this.OptimizerJob.OptimizerAsBatchRun, optimizerJob);
      // a batchrun child counts as fully downloaded only once every repetition produced a run
      if (this.OptimizerJob.OptimizerAsBatchRun.Repetitions == this.OptimizerJob.Optimizer.Runs.Count) {
        childIsFinishedOptimizerDownloaded = true;
      }
    } else {
      childIsFinishedOptimizerDownloaded = optimizerJob.Optimizer.ExecutionState == ExecutionState.Stopped;
    }
  }
  // NOTE(review): Single throws if childJobId does not identify exactly one direct child —
  // assumes callers only pass ids of direct children; verify against call sites
  HiveJob child = this.ChildHiveJobs.Single(j => j.Job.Id == childJobId);
  if (!optimizerJob.ComputeInParallel) {
    // suspend the child's own syncing while swapping in the downloaded optimizer
    child.syncJobsWithOptimizers = false;
    child.OptimizerJob = optimizerJob;
    child.syncJobsWithOptimizers = true;
  }
  if (childIsFinishedOptimizerDownloaded) {
    child.IsFinishedOptimizerDownloaded = true;
  }
  syncJobsWithOptimizers = true;
}
/// <summary>
/// Adds the runs from the optimizerJob to the batchrun and replaces the Optimizer
/// Sideeffect: the optimizerJob.Optimizer will be prepared (scopes are deleted and executionstate will be reset)
/// </summary>
private void UpdateOptimizerInBatchRun(BatchRun batchRun, OptimizerJob optimizerJob) {
  if (batchRun.Optimizer == null) {
    batchRun.Optimizer = (IOptimizer)optimizerJob.Optimizer; // only set the first optimizer as Optimizer. if every time the Optimizer would be set, the runs would be cleared each time
  }
  // merge runs, skipping those already present (repeated downloads deliver overlapping run sets)
  foreach (IRun run in optimizerJob.Optimizer.Runs) {
    if (!batchRun.Runs.Contains(run))
      batchRun.Runs.Add(run);
  }
}
/// <summary>
/// replace the child-optimizer in the experiment
/// Sideeffect: the optimizerJob.Optimizer will be prepared (scopes are deleted and executionstate will be reset)
/// </summary>
private void UpdateOptimizerInExperiment(Optimization.Experiment experiment, OptimizerJob optimizerJob) {
  if (optimizerJob.IndexInParentOptimizerList < 0)
    throw new IndexOutOfRangeException("IndexInParentOptimizerList must be equal or greater than zero! The Job is invalid and the optimizer-tree cannot be reassembled.");
  // pad the list so the optimizer can be placed at its recorded index even if
  // earlier siblings have not been downloaded yet
  while (experiment.Optimizers.Count < optimizerJob.IndexInParentOptimizerList) {
    experiment.Optimizers.Add(new UserDefinedAlgorithm("Placeholder")); // add dummy-entries to Optimizers so that its possible to insert the optimizerJob at the correct position
  }
  if (experiment.Optimizers.Count < optimizerJob.IndexInParentOptimizerList + 1) {
    experiment.Optimizers.Add(optimizerJob.Optimizer);
  } else {
    // if ComputeInParallel==true, don't replace the optimizer (except it is still a Placeholder)
    // this is because Jobs with ComputeInParallel get submitted to hive with their child-optimizers deleted
    if (!optimizerJob.ComputeInParallel || experiment.Optimizers[optimizerJob.IndexInParentOptimizerList].Name == "Placeholder") {
      experiment.Optimizers[optimizerJob.IndexInParentOptimizerList] = optimizerJob.Optimizer;
    }
  }
}
/// <summary>
/// Sets the IndexInParentOptimizerList property of the OptimizerJob
/// according to the position in the OptimizerList of the parentHiveJob.Job
/// Recursively updates all the child-jobs as well
/// </summary>
internal void SetIndexInParentOptimizerList(HiveJob parentHiveJob) {
  if (parentHiveJob != null) {
    if (parentHiveJob.OptimizerJob.Optimizer is Optimization.Experiment) {
      this.OptimizerJob.IndexInParentOptimizerList = parentHiveJob.OptimizerJob.OptimizerAsExperiment.Optimizers.IndexOf(this.OptimizerJob.Optimizer);
    } else if (parentHiveJob.OptimizerJob.Optimizer is Optimization.BatchRun) {
      // a batchrun has exactly one optimizer slot
      this.OptimizerJob.IndexInParentOptimizerList = 0;
    } else {
      throw new NotSupportedException("Only Experiment and BatchRuns are supported");
    }
  }
  // recurse into the subtree so every descendant records its position
  foreach (HiveJob child in childHiveJobs) {
    child.SetIndexInParentOptimizerList(this);
  }
}
// Displays the wrapped OptimizerJob's name when one is present, otherwise the base item name.
public override string ToString() {
  return optimizerJob != null ? optimizerJob.ToString() : base.ToString();
}
/// <summary>
/// Copies the state of a lightweight job (id, execution time, state, state log)
/// into this job and raises the corresponding change events. No-op for null.
/// </summary>
public void UpdateFromLightweightJob(LightweightJob lightweightJob) {
  if (lightweightJob != null) {
    job.Id = lightweightJob.Id; // (removed an accidental duplicate of this assignment)
    job.ExecutionTime = lightweightJob.ExecutionTime;
    job.State = lightweightJob.State;
    job.StateLog = new List(lightweightJob.StateLog);
    // what about parentJob
    OnJobStateChanged();
    OnToStringChanged();
    OnItemImageChanged();
  }
}
/// <summary>
/// Creates a JobData object containing the Job and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// if true the Child-Optimizers will not be serialized (if the job contains an Experiment)
/// </param>
/// <param name="plugins">out: the plugins (with transitive dependencies) declaring the serialized types</param>
public JobData GetAsJobData(bool withoutChildOptimizers, out List plugins) {
  plugins = new List();
  if (this.optimizerJob == null || this.optimizerJob.Optimizer == null)
    return null;
  IEnumerable usedTypes;
  byte[] jobByteArray;
  if (withoutChildOptimizers && this.OptimizerJob.Optimizer is Optimization.Experiment) {
    OptimizerJob clonedJob = (OptimizerJob)this.OptimizerJob.Clone(); // use a cloned job, so that the childHiveJob don't get confused
    clonedJob.OptimizerAsExperiment.Optimizers.Clear();
    jobByteArray = PersistenceUtil.Serialize(clonedJob, out usedTypes);
  } else if (withoutChildOptimizers && this.OptimizerJob.Optimizer is Optimization.BatchRun) {
    OptimizerJob clonedJob = (OptimizerJob)this.OptimizerJob.Clone();
    clonedJob.OptimizerAsBatchRun.Optimizer = null;
    jobByteArray = PersistenceUtil.Serialize(clonedJob, out usedTypes);
  } else if (this.OptimizerJob.Optimizer is IAlgorithm) {
    // NOTE(review): this mutates the live optimizer (not a clone) and the flag is never
    // restored afterwards — confirm this side effect is intended
    ((IAlgorithm)this.OptimizerJob.Optimizer).StoreAlgorithmInEachRun = false; // avoid storing the algorithm in runs to reduce size
    jobByteArray = PersistenceUtil.Serialize(this.OptimizerJob, out usedTypes);
  } else {
    jobByteArray = PersistenceUtil.Serialize(this.OptimizerJob, out usedTypes);
  }
  JobData jobData = new JobData() {
    JobId = job.Id,
    Data = jobByteArray
  };
  CollectDeclaringPlugins(plugins, usedTypes);
  return jobData;
}
// Adds the declaring plugin of every used type to the list (no duplicates)
// and pulls in each plugin's transitive dependencies.
private void CollectDeclaringPlugins(List plugins, IEnumerable usedTypes) {
  foreach (Type usedType in usedTypes) {
    var declaringPlugin = ApplicationManager.Manager.GetDeclaringPlugin(usedType);
    if (declaringPlugin == null) continue;
    if (plugins.Contains(declaringPlugin)) continue;
    plugins.Add(declaringPlugin);
    CollectPluginDependencies(plugins, declaringPlugin);
  }
}
// Recursively adds the plugin's dependency closure to the list, skipping entries already present.
private void CollectPluginDependencies(List plugins, IPluginDescription plugin) {
  if (plugin == null) return;
  foreach (var dep in plugin.Dependencies) {
    if (plugins.Contains(dep)) continue;
    plugins.Add(dep);
    CollectPluginDependencies(plugins, dep);
  }
}
#region Events
// Raised when the Job property is replaced.
public event EventHandler JobChanged;
private void OnJobChanged() {
  LogMessage("JobChanged");
  EventHandler handler = JobChanged;
  if (handler != null) handler(this, EventArgs.Empty);
}
// Raised when job state data is copied in via UpdateFromLightweightJob.
public event EventHandler JobStateChanged;
private void OnJobStateChanged() {
  LogMessage("JobStateChanged (State: " + this.Job.State + ", ExecutionTime: " + this.Job.ExecutionTime.ToString() + ")");
  EventHandler handler = JobStateChanged;
  if (handler != null) handler(this, EventArgs.Empty);
}
// Raised when the OptimizerJob property is replaced.
public event EventHandler OptimizerJobChanged;
private void OnOptimizerJobChanged() {
  OptimizerJob_ComputeInParallelChanged(this, EventArgs.Empty);
  // fix: raise OptimizerJobChanged here — previously the JobChanged delegate was read,
  // which left the declared OptimizerJobChanged event permanently unraised
  var handler = OptimizerJobChanged;
  if (handler != null) handler(this, EventArgs.Empty);
}
// Raised when IsFinishedOptimizerDownloaded transitions.
public event EventHandler IsFinishedOptimizerDownloadedChanged;
private void OnIsFinishedOptimizerDownloadedChanged() {
  var handler = IsFinishedOptimizerDownloadedChanged;
  if (handler != null) handler(this, EventArgs.Empty);
}
#endregion
/// <summary>
/// Appends a message to the wrapped OptimizerJob's log; no-op when no OptimizerJob
/// is present. Writes are serialized through the static locker, so log writes of
/// ALL HiveJob instances are mutually exclusive.
/// </summary>
public void LogMessage(string message) {
  lock (locker) {
    if (optimizerJob != null) {
      optimizerJob.Log.LogMessage(message);
    }
  }
}
/// <summary>
/// Returns a list of HiveJobs including this and all its child-jobs recursively
/// </summary>
public IEnumerable GetAllHiveJobs() {
  var collected = new List();
  collected.Add(this); // the subtree root comes first
  foreach (HiveJob descendant in this.ChildHiveJobs) {
    collected.AddRange(descendant.GetAllHiveJobs());
  }
  return collected;
}
/// <summary>
/// Returns the HiveJob whose direct children contain the job with the given id,
/// searching the subtree recursively; null if no descendant matches.
/// </summary>
public HiveJob GetParentByJobId(Guid jobId) {
  // Any() instead of SingleOrDefault(): several children may still carry Guid.Empty
  // before upload (see RemoveByJobId), which would make SingleOrDefault throw
  if (this.ChildHiveJobs.Any(j => j.job.Id == jobId))
    return this;
  foreach (HiveJob child in this.childHiveJobs) {
    HiveJob result = child.GetParentByJobId(jobId);
    if (result != null)
      return result;
  }
  return null;
}
// Returns the direct child wrapping exactly this OptimizerJob instance, or null.
public HiveJob GetChildByOptimizerJob(OptimizerJob optimizerJob) {
  foreach (var candidate in ChildHiveJobs) {
    if (candidate.OptimizerJob == optimizerJob)
      return candidate;
  }
  return null;
}
/// <summary>
/// Returns the direct child whose OptimizerJob wraps exactly this optimizer instance, or null.
/// </summary>
public HiveJob GetChildByOptimizer(IOptimizer optimizer) {
  foreach (var child in ChildHiveJobs) {
    // child.OptimizerJob can be null when deserialization of its job data failed
    if (child.OptimizerJob != null && child.OptimizerJob.Optimizer == optimizer)
      return child;
  }
  return null;
}
/// <summary>
/// Searches for an HiveJob object with the correct jobId recursively
/// </summary>
public HiveJob GetHiveJobByJobId(Guid jobId) {
  if (this.Job.Id == jobId)
    return this;
  foreach (HiveJob child in this.ChildHiveJobs) {
    var match = child.GetHiveJobByJobId(jobId);
    if (match != null)
      return match;
  }
  return null;
}
/// <summary>
/// Removes every descendant with the given job id from the tree.
/// </summary>
public void RemoveByJobId(Guid jobId) {
  // materialize first: if Guid.Empty needs to be removed, there could be more than one with this jobId
  var matches = ChildHiveJobs.Where(c => c.Job.Id == jobId).ToList();
  foreach (HiveJob match in matches) {
    this.childHiveJobs.Remove(match);
  }
  // recurse into the remaining children
  foreach (HiveJob child in ChildHiveJobs) {
    child.RemoveByJobId(jobId);
  }
}
// IItemTree implementation: the direct children of this tree node.
public IEnumerable GetChildNodes() {
  return this.ChildHiveJobs;
}
}
}