#region License Information
/* HeuristicLab
* Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Optimization;
using HeuristicLab.PluginInfrastructure;
using TS = System.Threading.Tasks;
namespace HeuristicLab.Clients.Hive {
[Item("HiveClient", "Hive client.")]
public sealed class HiveClient : IContent {
private static HiveClient instance;
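/// <summary>
/// Lazily created singleton instance (creation is not synchronized).
/// </summary>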
public static HiveClient Instance {
get {
if (instance == null) instance = new HiveClient();
return instance;
}
}
#region Properties
private ItemCollection<RefreshableJob> jobs;
public ItemCollection<RefreshableJob> Jobs {
get { return jobs; }
set {
if (value != jobs) {
jobs = value;
OnHiveExperimentsChanged();
}
}
}
private List<Plugin> onlinePlugins;
public List<Plugin> OnlinePlugins {
get { return onlinePlugins; }
set { onlinePlugins = value; }
}
private List<Plugin> alreadyUploadedPlugins;
public List<Plugin> AlreadyUploadedPlugins {
get { return alreadyUploadedPlugins; }
set { alreadyUploadedPlugins = value; }
}
private bool isAllowedPrivileged;
public bool IsAllowedPrivileged {
get { return isAllowedPrivileged; }
set { isAllowedPrivileged = value; }
}
#endregion
private HiveClient() { }
#region Refresh
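/// <summary>
/// Reloads the list of jobs from the Hive service: jobs returned by the service are added or updated,
/// jobs that have not been uploaded yet (Id == Guid.Empty) are kept, and result polling is stopped for
/// jobs that no longer exist on the server.
/// </summary>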
public void Refresh() {
OnRefreshing();
try {
this.IsAllowedPrivileged = HiveServiceLocator.Instance.CallHiveService((s) => s.IsAllowedPrivileged());
var oldJobs = jobs ?? new ItemCollection<RefreshableJob>();
jobs = new HiveItemCollection<RefreshableJob>();
var jobsLoaded = HiveServiceLocator.Instance.CallHiveService<IEnumerable<Job>>(s => s.GetJobs());
foreach (var j in jobsLoaded) {
var job = oldJobs.SingleOrDefault(x => x.Id == j.Id);
if (job == null) {
// new
jobs.Add(new RefreshableJob(j) { IsAllowedPrivileged = this.isAllowedPrivileged });
} else {
// update
job.Job = j;
job.IsAllowedPrivileged = this.isAllowedPrivileged;
jobs.Add(job);
oldJobs.Remove(job); // only jobs that no longer exist on the server remain in oldJobs
}
}
// handle jobs that were not in the list of loaded jobs: keep the ones not uploaded yet, stop polling for the rest
foreach (var job in oldJobs) {
if (job.Id == Guid.Empty) {
// experiment not uploaded... keep
jobs.Add(job);
} else {
job.RefreshAutomatically = false; // stop results polling
}
}
}
catch {
jobs = null;
throw;
}
finally {
OnRefreshed();
}
}
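/// <summary>
/// Runs <see cref="Refresh"/> on a background thread and passes any exception to <paramref name="exceptionCallback"/>.
/// </summary>
/// <example>
/// A minimal usage sketch; <c>log</c> stands for a hypothetical ILog instance and is not part of this class:
/// <code>
/// HiveClient.Instance.RefreshAsync(ex => log.LogException(ex));
/// </code>
/// </example>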
public void RefreshAsync(Action<Exception> exceptionCallback) {
var call = new Func<Exception>(delegate() {
try {
Refresh();
}
catch (Exception ex) {
return ex;
}
return null;
});
call.BeginInvoke(delegate(IAsyncResult result) {
Exception ex = call.EndInvoke(result);
if (ex != null) exceptionCallback(ex);
}, null);
}
#endregion
#region Store
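/// <summary>
/// Uploads a new item to the Hive server: a RefreshableJob without an Id is uploaded as a job,
/// a JobPermission without an Id is granted (after resolving the granted user name to a user id),
/// and an already existing Job is updated.
/// </summary>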
public static void Store(IHiveItem item, CancellationToken cancellationToken) {
if (item.Id == Guid.Empty) {
if (item is RefreshableJob) {
HiveClient.Instance.UploadJob((RefreshableJob)item, cancellationToken);
}
if (item is JobPermission) {
var hep = (JobPermission)item;
hep.GrantedUserId = HiveServiceLocator.Instance.CallHiveService((s) => s.GetUserIdByUsername(hep.GrantedUserName));
if (hep.GrantedUserId == Guid.Empty) {
throw new ArgumentException(string.Format("The user {0} was not found.", hep.GrantedUserName));
}
HiveServiceLocator.Instance.CallHiveService((s) => s.GrantPermission(hep.JobId, hep.GrantedUserId, hep.Permission));
}
} else {
if (item is Job)
HiveServiceLocator.Instance.CallHiveService(s => s.UpdateJob((Job)item));
}
}
public static void StoreAsync(Action<Exception> exceptionCallback, IHiveItem item, CancellationToken cancellationToken) {
var call = new Func<Exception>(delegate() {
try {
Store(item, cancellationToken);
}
catch (Exception ex) {
return ex;
}
return null;
});
call.BeginInvoke(delegate(IAsyncResult result) {
Exception ex = call.EndInvoke(result);
if (ex != null) exceptionCallback(ex);
}, null);
}
#endregion
#region Delete
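/// <summary>
/// Deletes the given item on the Hive server: jobs are deleted by Id (result polling is stopped first for
/// refreshable jobs), job permissions are revoked, and the item's Id is reset afterwards. Items that have
/// not been uploaded yet are ignored, except job permissions.
/// </summary>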
public static void Delete(IHiveItem item) {
if (item.Id == Guid.Empty && item.GetType() != typeof(JobPermission))
return;
if (item is Job)
HiveServiceLocator.Instance.CallHiveService(s => s.DeleteJob(item.Id));
if (item is RefreshableJob) {
RefreshableJob job = (RefreshableJob)item;
if (job.RefreshAutomatically) {
job.StopResultPolling();
}
HiveServiceLocator.Instance.CallHiveService(s => s.DeleteJob(item.Id));
}
if (item is JobPermission) {
var hep = (JobPermission)item;
HiveServiceLocator.Instance.CallHiveService(s => s.RevokePermission(hep.JobId, hep.GrantedUserId));
}
item.Id = Guid.Empty;
}
#endregion
#region Events
public event EventHandler Refreshing;
private void OnRefreshing() {
EventHandler handler = Refreshing;
if (handler != null) handler(this, EventArgs.Empty);
}
public event EventHandler Refreshed;
private void OnRefreshed() {
var handler = Refreshed;
if (handler != null) handler(this, EventArgs.Empty);
}
public event EventHandler HiveExperimentsChanged;
private void OnHiveExperimentsChanged() {
var handler = HiveExperimentsChanged;
if (handler != null) handler(this, EventArgs.Empty);
}
#endregion
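/// <summary>
/// Stores the job asynchronously and switches its execution state to Started; if the upload fails,
/// the state is reset to Prepared and the exception is forwarded to <paramref name="exceptionCallback"/>.
/// </summary>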
public static void StartJob(Action<Exception> exceptionCallback, RefreshableJob refreshableJob, CancellationToken cancellationToken) {
HiveClient.StoreAsync(
new Action<Exception>((Exception ex) => {
refreshableJob.ExecutionState = ExecutionState.Prepared;
exceptionCallback(ex);
}), refreshableJob, cancellationToken);
refreshableJob.ExecutionState = ExecutionState.Started;
}
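/// <summary>
/// Pauses all tasks of the job that are not already finished, aborted, or failed.
/// </summary>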
public static void PauseJob(RefreshableJob refreshableJob) {
HiveServiceLocator.Instance.CallHiveService(service => {
foreach (HiveTask task in refreshableJob.GetAllHiveTasks()) {
if (task.Task.State != TaskState.Finished && task.Task.State != TaskState.Aborted && task.Task.State != TaskState.Failed)
service.PauseTask(task.Task.Id);
}
});
refreshableJob.ExecutionState = ExecutionState.Paused;
}
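/// <summary>
/// Stops all tasks of the job that are not already finished, aborted, or failed.
/// </summary>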
public static void StopJob(RefreshableJob refreshableJob) {
HiveServiceLocator.Instance.CallHiveService(service => {
foreach (HiveTask task in refreshableJob.GetAllHiveTasks()) {
if (task.Task.State != TaskState.Finished && task.Task.State != TaskState.Aborted && task.Task.State != TaskState.Failed)
service.StopTask(task.Task.Id);
}
});
refreshableJob.ExecutionState = ExecutionState.Stopped;
}
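/// <summary>
/// Restarts all paused tasks of the job and switches its execution state back to Started.
/// </summary>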
public static void ResumeJob(RefreshableJob refreshableJob) {
HiveServiceLocator.Instance.CallHiveService(service => {
foreach (HiveTask task in refreshableJob.GetAllHiveTasks()) {
if (task.Task.State == TaskState.Paused) {
service.RestartTask(task.Task.Id);
}
}
});
refreshableJob.ExecutionState = ExecutionState.Started;
}
#region Upload Job
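// limits the number of task uploads that run in parallel to Settings.Default.MaxParallelUploads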
private Semaphore taskUploadSemaphore = new Semaphore(Settings.Default.MaxParallelUploads, Settings.Default.MaxParallelUploads);
private static object jobCountLocker = new object();
private static object pluginLocker = new object();
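/// <summary>
/// Uploads a job to the Hive: resolves the configured resource names to ids, creates the job on the server,
/// uploads the configuration file and determines the required plugins, and finally uploads all tasks in
/// parallel (bounded by the task upload semaphore).
/// </summary>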
private void UploadJob(RefreshableJob refreshableJob, CancellationToken cancellationToken) {
try {
refreshableJob.Progress = new Progress("Connecting to server...");
refreshableJob.IsProgressing = true;
IEnumerable<string> resourceNames = ToResourceNameList(refreshableJob.Job.ResourceNames);
var resourceIds = new List<Guid>();
foreach (var resourceName in resourceNames) {
Guid resourceId = HiveServiceLocator.Instance.CallHiveService((s) => s.GetResourceId(resourceName));
if (resourceId == Guid.Empty) {
throw new ResourceNotFoundException(string.Format("Could not find the resource '{0}'", resourceName));
}
resourceIds.Add(resourceId);
}
foreach (OptimizerHiveTask hiveJob in refreshableJob.HiveTasks.OfType<OptimizerHiveTask>()) {
hiveJob.SetIndexInParentOptimizerList(null);
}
// upload Job
refreshableJob.Progress.Status = "Uploading Job...";
refreshableJob.Job.Id = HiveServiceLocator.Instance.CallHiveService((s) => s.AddJob(refreshableJob.Job));
bool isPrivileged = refreshableJob.Job.IsPrivileged;
refreshableJob.Job = HiveServiceLocator.Instance.CallHiveService((s) => s.GetJob(refreshableJob.Job.Id)); // update owner and permissions
refreshableJob.Job.IsPrivileged = isPrivileged;
cancellationToken.ThrowIfCancellationRequested();
int totalJobCount = refreshableJob.GetAllHiveTasks().Count();
int[] jobCount = new int[1]; // use a reference type (int-array) instead of value type (int) in order to pass the value via a delegate to task-parallel-library
cancellationToken.ThrowIfCancellationRequested();
// upload plugins
refreshableJob.Progress.Status = "Uploading plugins...";
this.OnlinePlugins = HiveServiceLocator.Instance.CallHiveService((s) => s.GetPlugins());
this.AlreadyUploadedPlugins = new List<Plugin>();
Plugin configFilePlugin = HiveServiceLocator.Instance.CallHiveService((s) => UploadConfigurationFile(s, onlinePlugins));
this.alreadyUploadedPlugins.Add(configFilePlugin);
cancellationToken.ThrowIfCancellationRequested();
if (refreshableJob.RefreshAutomatically) refreshableJob.StartResultPolling();
// upload tasks
refreshableJob.Progress.Status = "Uploading tasks...";
var tasks = new List<TS.Task>();
foreach (HiveTask hiveTask in refreshableJob.HiveTasks) {
tasks.Add(TS.Task.Factory.StartNew((hj) => {
UploadTaskWithChildren(refreshableJob.Progress, (HiveTask)hj, null, resourceIds, jobCount, totalJobCount, configFilePlugin.Id, refreshableJob.Job.Id, refreshableJob.Log, refreshableJob.Job.IsPrivileged, cancellationToken);
}, hiveTask)
.ContinueWith((x) => refreshableJob.Log.LogException(x.Exception), TaskContinuationOptions.OnlyOnFaulted));
}
try {
TS.Task.WaitAll(tasks.ToArray());
}
catch (AggregateException ae) {
if (!ae.InnerExceptions.All(e => e is TaskCanceledException)) throw; // for some reason WaitAll throws an AggregateException containing a TaskCanceledException; its origin is unclear, but the tasks all finish properly, so it is ignored for now
}
refreshableJob.Job.Modified = false;
}
finally {
refreshableJob.IsProgressing = false;
refreshableJob.Progress.Finish();
}
}
/// <summary>
/// Uploads the local configuration file as a plugin
/// </summary>
private static Plugin UploadConfigurationFile(IHiveService service, List<Plugin> onlinePlugins) {
string configFilePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, Settings.Default.HLBinaryName + ".config");
string configFileName = Settings.Default.HLBinaryName + ".config";
byte[] hash;
byte[] data = File.ReadAllBytes(configFilePath);
using (SHA1 sha1 = SHA1.Create()) {
hash = sha1.ComputeHash(data);
}
Plugin configPlugin = new Plugin() { Name = "Configuration", Version = new Version(), Hash = hash };
PluginData configFile = new PluginData() { FileName = configFileName, Data = data };
IEnumerable<Plugin> onlineConfig = onlinePlugins.Where(p => p.Hash.SequenceEqual(hash));
if (onlineConfig.Count() > 0) {
return onlineConfig.First();
} else {
configPlugin.Id = service.AddPlugin(configPlugin, new List<PluginData> { configFile });
return configPlugin;
}
}
/// <summary>
/// Uploads the given task and all its child tasks while setting the proper parentTaskId values for the children
/// </summary>
/// <param name="parentHiveTask">shall be null if it is the root task</param>
private void UploadTaskWithChildren(IProgress progress, HiveTask hiveTask, HiveTask parentHiveTask, IEnumerable<Guid> groups, int[] taskCount, int totalJobCount, Guid configPluginId, Guid jobId, ILog log, bool isPrivileged, CancellationToken cancellationToken) {
taskUploadSemaphore.WaitOne();
bool semaphoreReleased = false;
try {
cancellationToken.ThrowIfCancellationRequested();
lock (jobCountLocker) {
taskCount[0]++;
}
TaskData taskData;
List<IPluginDescription> plugins;
if (hiveTask.ItemTask.ComputeInParallel && (hiveTask.ItemTask.Item is Optimization.Experiment || hiveTask.ItemTask.Item is Optimization.BatchRun)) {
hiveTask.Task.IsParentTask = true;
hiveTask.Task.FinishWhenChildJobsFinished = true;
taskData = hiveTask.GetAsTaskData(true, out plugins);
} else {
if (hiveTask.ItemTask.Item is EngineAlgorithm && ((EngineAlgorithm)hiveTask.ItemTask.Item).Engine is IHiveEngine) {
hiveTask.Task.IsParentTask = true;
//TODO: this is a little hack so that this task does not get executed immediately because the taskdata will be uploaded twice
hiveTask.Task.State = TaskState.Aborted;
} else {
hiveTask.Task.IsParentTask = false;
}
hiveTask.Task.FinishWhenChildJobsFinished = false;
taskData = hiveTask.GetAsTaskData(false, out plugins);
}
cancellationToken.ThrowIfCancellationRequested();
TryAndRepeat(() => {
if (!cancellationToken.IsCancellationRequested) {
lock (pluginLocker) {
HiveServiceLocator.Instance.CallHiveService((s) => hiveTask.Task.PluginsNeededIds = PluginUtil.GetPluginDependencies(s, this.onlinePlugins, this.alreadyUploadedPlugins, plugins));
}
}
}, Settings.Default.MaxRepeatServiceCalls, "Failed to upload plugins");
cancellationToken.ThrowIfCancellationRequested();
hiveTask.Task.PluginsNeededIds.Add(configPluginId);
hiveTask.Task.JobId = jobId;
hiveTask.Task.IsPrivileged = isPrivileged;
log.LogMessage(string.Format("Uploading task ({0} kb, {1} objects)", taskData.Data.Count() / 1024, hiveTask.ItemTask.GetObjectGraphObjects().Count()));
TryAndRepeat(() => {
if (!cancellationToken.IsCancellationRequested) {
if (parentHiveTask != null) {
hiveTask.Task.Id = HiveServiceLocator.Instance.CallHiveService((s) => s.AddChildTask(parentHiveTask.Task.Id, hiveTask.Task, taskData));
} else {
hiveTask.Task.Id = HiveServiceLocator.Instance.CallHiveService((s) => s.AddTask(hiveTask.Task, taskData, groups.ToList()));
if (hiveTask.Task.State == TaskState.Aborted) {
//TODO: this is a very bad hack
if (hiveTask.ItemTask.Item is EngineAlgorithm && ((EngineAlgorithm)hiveTask.ItemTask.Item).Engine is IHiveEngine) {
IHiveEngine he = ((EngineAlgorithm)hiveTask.ItemTask.Item).Engine as IHiveEngine;
he.ParentTaskId = hiveTask.Task.Id;
taskData = hiveTask.GetAsTaskData(false, out plugins);
taskData.TaskId = hiveTask.Task.Id;
Task t = HiveServiceLocator.Instance.CallHiveService((s) => s.GetTask(hiveTask.Task.Id));
t.State = TaskState.Waiting;
HiveServiceLocator.Instance.CallHiveService((s) => s.UpdateTaskData(t, taskData));
}
}
}
}
}, Settings.Default.MaxRepeatServiceCalls, "Failed to add task", log);
cancellationToken.ThrowIfCancellationRequested();
lock (jobCountLocker) {
progress.ProgressValue = (double)taskCount[0] / totalJobCount;
progress.Status = string.Format("Uploaded task ({0} of {1})", taskCount[0], totalJobCount);
}
var tasks = new List<TS.Task>();
foreach (HiveTask child in hiveTask.ChildHiveTasks) {
tasks.Add(TS.Task.Factory.StartNew((tuple) => {
var arguments = (Tuple<HiveTask, HiveTask>)tuple;
UploadTaskWithChildren(progress, arguments.Item1, arguments.Item2, groups, taskCount, totalJobCount, configPluginId, jobId, log, isPrivileged, cancellationToken);
}, new Tuple<HiveTask, HiveTask>(child, hiveTask))
.ContinueWith((x) => log.LogException(x.Exception), TaskContinuationOptions.OnlyOnFaulted));
}
taskUploadSemaphore.Release(); semaphoreReleased = true; // the semaphore has to be released before WaitAll!
try {
TS.Task.WaitAll(tasks.ToArray());
}
catch (AggregateException ae) {
if (!ae.InnerExceptions.All(e => e is TaskCanceledException)) throw; // for some reason WaitAll throws an AggregateException containing a TaskCanceledException; its origin is unclear, but the tasks all finish properly, so it is ignored for now
}
}
finally {
if (!semaphoreReleased) taskUploadSemaphore.Release();
}
}
#endregion
#region Download Experiment
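/// <summary>
/// Downloads all tasks of the given job, rebuilds the parent/child HiveTask tree, and sets the job's
/// execution state to Stopped or Started depending on whether all tasks are already finished.
/// </summary>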
public static void LoadJob(RefreshableJob refreshableJob) {
var hiveExperiment = refreshableJob.Job;
refreshableJob.Progress = new Progress();
try {
refreshableJob.IsProgressing = true;
int totalJobCount = 0;
IEnumerable<LightweightTask> allTasks;
refreshableJob.Progress.Status = "Connecting to Server...";
// fetch all task objects to create the full tree of HiveTask objects
refreshableJob.Progress.Status = "Downloading list of tasks...";
allTasks = HiveServiceLocator.Instance.CallHiveService(s => s.GetLightweightJobTasks(hiveExperiment.Id));
totalJobCount = allTasks.Count();
refreshableJob.Progress.Status = "Downloading tasks...";
TaskDownloader downloader = new TaskDownloader(allTasks.Select(x => x.Id));
downloader.StartAsync();
while (!downloader.IsFinished) {
refreshableJob.Progress.ProgressValue = downloader.FinishedCount / (double)totalJobCount;
refreshableJob.Progress.Status = string.Format("Downloading/deserializing tasks... ({0}/{1} finished)", downloader.FinishedCount, totalJobCount);
Thread.Sleep(500);
if (downloader.IsFaulted) {
throw downloader.Exception;
}
}
IDictionary<Guid, HiveTask> allHiveTasks = downloader.Results;
var parents = allHiveTasks.Values.Where(x => !x.Task.ParentTaskId.HasValue);
refreshableJob.Progress.Status = "Downloading/deserializing complete. Displaying tasks...";
// build child-task tree
foreach (HiveTask hiveTask in parents) {
BuildHiveJobTree(hiveTask, allTasks, allHiveTasks);
}
refreshableJob.HiveTasks = new ItemCollection<HiveTask>(parents);
if (refreshableJob.IsFinished()) {
refreshableJob.ExecutionState = Core.ExecutionState.Stopped;
} else {
refreshableJob.ExecutionState = Core.ExecutionState.Started;
}
refreshableJob.OnLoaded();
}
finally {
refreshableJob.IsProgressing = false;
}
}
private static void BuildHiveJobTree(HiveTask parentHiveTask, IEnumerable<LightweightTask> allTasks, IDictionary<Guid, HiveTask> allHiveTasks) {
IEnumerable<LightweightTask> childTasks = from job in allTasks
where job.ParentTaskId.HasValue && job.ParentTaskId.Value == parentHiveTask.Task.Id
orderby job.DateCreated ascending
select job;
foreach (LightweightTask task in childTasks) {
HiveTask childHiveTask = allHiveTasks[task.Id];
parentHiveTask.AddChildHiveTask(childHiveTask);
BuildHiveJobTree(childHiveTask, allTasks, allHiveTasks);
}
}
#endregion
/// <summary>
/// Converts a string containing resource names separated by ';' to an enumerable
/// </summary>
private static IEnumerable<string> ToResourceNameList(string resourceNames) {
if (!string.IsNullOrEmpty(resourceNames)) {
return resourceNames.Split(';');
} else {
return new List<string>();
}
}
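/// <summary>
/// Downloads and deserializes the task data of the given task; returns null if deserialization fails.
/// </summary>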
public static ItemTask LoadItemJob(Guid jobId) {
TaskData taskData = HiveServiceLocator.Instance.CallHiveService(s => s.GetTaskData(jobId));
try {
return PersistenceUtil.Deserialize<ItemTask>(taskData.Data);
}
catch {
return null;
}
}
/// <summary>
/// Executes the action. If it throws an exception, it is repeated until the repetition count is reached.
/// If repetitions is -1, it is repeated infinitely.
/// </summary>
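/// <example>
/// A minimal usage sketch; <c>UploadChunk</c> is a hypothetical helper, not part of this class:
/// <code>
/// HiveClient.TryAndRepeat(() => UploadChunk(), Settings.Default.MaxRepeatServiceCalls, "Failed to upload chunk");
/// </code>
/// </example>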
public static void TryAndRepeat(Action action, int repetitions, string errorMessage, ILog log = null) {
while (true) {
try { action(); return; }
catch (Exception e) {
if (repetitions == 0) throw new HiveException(errorMessage, e);
if (log != null) log.LogMessage(string.Format("{0}: {1} - will try again!", errorMessage, e.ToString()));
repetitions--;
}
}
}
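/// <summary>
/// Retrieves all permissions of a job and resolves the granted user ids to user names for display.
/// </summary>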
public static HiveItemCollection<JobPermission> GetJobPermissions(Guid jobId) {
return HiveServiceLocator.Instance.CallHiveService((service) => {
IEnumerable<JobPermission> jps = service.GetJobPermissions(jobId);
foreach (var hep in jps) {
hep.UnmodifiedGrantedUserNameUpdate(service.GetUsernameByUserId(hep.GrantedUserId));
}
return new HiveItemCollection<JobPermission>(jps);
});
}
}
}