#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2011 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Threading;
using HeuristicLab.Clients.Hive.Jobs;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.PluginInfrastructure;

namespace HeuristicLab.Clients.Hive {
  // todo: rename from ExpMgrClient to ExperimentManagerClient
  [Item("ExperimentManagerClient", "Hive experiment manager client.")]
  public sealed class ExperimentManagerClient : IContent {
    private static ExperimentManagerClient instance;
    public static ExperimentManagerClient Instance {
      get {
        if (instance == null) instance = new ExperimentManagerClient();
        return instance;
      }
    }

    #region Properties
    private ItemCollection<HiveExperiment> hiveExperiments;
    public ItemCollection<HiveExperiment> HiveExperiments {
      get { return hiveExperiments; }
      set { hiveExperiments = value; }
    }

    private List<Plugin> onlinePlugins;
    public List<Plugin> OnlinePlugins {
      get { return onlinePlugins; }
      set { onlinePlugins = value; }
    }

    private List<Plugin> alreadyUploadedPlugins;
    public List<Plugin> AlreadyUploadedPlugins {
      get { return alreadyUploadedPlugins; }
      set { alreadyUploadedPlugins = value; }
    }
    #endregion

    public ExperimentManagerClient() { }

    #region Refresh
    public void Refresh() {
      OnRefreshing();
      hiveExperiments = new HiveItemCollection<HiveExperiment>();
      try {
        hiveExperiments.AddRange(ServiceLocator.Instance.CallHiveService<IEnumerable<HiveExperiment>>(s => s.GetHiveExperiments()).OrderBy(x => x.Name));
      }
      finally {
        OnRefreshed();
      }
    }

    public void RefreshAsync(Action<Exception> exceptionCallback) {
      var call = new Func<Exception>(delegate() {
        try {
          Refresh();
        }
        catch (Exception ex) {
          return ex;
        }
        return null;
      });
      call.BeginInvoke(delegate(IAsyncResult result) {
        Exception ex = call.EndInvoke(result);
        if (ex != null) exceptionCallback(ex);
      }, null);
    }
    #endregion

    #region Store
    public static void Store(IHiveItem item) {
      if (item.Id == Guid.Empty) {
        if (item is HiveExperiment) {
          ExperimentManagerClient.Instance.UploadExperiment((HiveExperiment)item);
        }
      } else {
        if (item is HiveExperiment)
          ServiceLocator.Instance.CallHiveService(s => s.UpdateHiveExperiment((HiveExperiment)item));
      }
    }

    public static void StoreAsync(Action<Exception> exceptionCallback, IHiveItem item) {
      var call = new Func<Exception>(delegate() {
        try {
          Store(item);
        }
        catch (Exception ex) {
          return ex;
        }
        return null;
      });
      call.BeginInvoke(delegate(IAsyncResult result) {
        Exception ex = call.EndInvoke(result);
        if (ex != null) exceptionCallback(ex);
      }, null);
    }
    #endregion

    #region Delete
    public static void Delete(IHiveItem item) {
      if (item is HiveExperiment)
        ServiceLocator.Instance.CallHiveService(s => s.DeleteHiveExperiment(item.Id));
      item.Id = Guid.Empty;
    }
    #endregion

    #region Events
    public event EventHandler Refreshing;
    private void OnRefreshing() {
      EventHandler handler = Refreshing;
      if (handler != null) handler(this, EventArgs.Empty);
    }
    public event EventHandler Refreshed;
    private void OnRefreshed() {
      var handler = Refreshed;
      if (handler != null) handler(this, EventArgs.Empty);
    }
    #endregion

    public static void StartExperiment(Action<Exception> exceptionCallback, HiveExperiment hiveExperiment) {
      ExperimentManagerClient.StoreAsync(
        new Action<Exception>((Exception ex) => {
          hiveExperiment.ExecutionState = ExecutionState.Prepared;
          exceptionCallback(ex);
        }), hiveExperiment);
      hiveExperiment.ExecutionState = ExecutionState.Started;
    }

    public static void PauseExperiment(HiveExperiment hiveExperiment) {
      ServiceLocator.Instance.CallHiveService(service => {
        foreach (HiveJob job in hiveExperiment.GetAllHiveJobs()) {
          if (job.Job.State != JobState.Finished && job.Job.State != JobState.Aborted && job.Job.State != JobState.Failed)
            service.PauseJob(job.Job.Id);
        }
      });
      hiveExperiment.ExecutionState = ExecutionState.Paused;
    }

    public static void StopExperiment(HiveExperiment hiveExperiment) {
      ServiceLocator.Instance.CallHiveService(service => {
        foreach (HiveJob job in hiveExperiment.GetAllHiveJobs()) {
          if (job.Job.State != JobState.Finished && job.Job.State != JobState.Aborted && job.Job.State != JobState.Failed)
            service.StopJob(job.Job.Id);
        }
      });
      // the execution state does not need to be set here; it is set to Stopped once all jobs have been downloaded
    }

    #region Upload Experiment
    private void UploadExperiment(HiveExperiment hiveExperiment) {
      try {
        hiveExperiment.Progress = new Progress("Connecting to server...");
        hiveExperiment.IsProgressing = true;

        ServiceLocator.Instance.CallHiveService(service => {
          // resolve the configured resource names to resource ids
          IEnumerable<string> resourceNames = ToResourceNameList(hiveExperiment.ResourceNames);
          var resourceIds = new List<Guid>();
          foreach (var resourceName in resourceNames) {
            Guid resourceId = service.GetResourceId(resourceName);
            if (resourceId == Guid.Empty) {
              throw new ResourceNotFoundException(string.Format("Could not find the resource '{0}'", resourceName));
            }
            resourceIds.Add(resourceId);
          }

          foreach (HiveJob hiveJob in hiveExperiment.HiveJobs) {
            hiveJob.SetIndexInParentOptimizerList(null);
          }

          // upload HiveExperiment
          hiveExperiment.Progress.Status = "Uploading HiveExperiment...";
          hiveExperiment.Id = service.AddHiveExperiment(hiveExperiment);

          int totalJobCount = hiveExperiment.GetAllHiveJobs().Count();
          int jobCount = 0;

          // upload plugins
          hiveExperiment.Progress.Status = "Uploading plugins...";
          this.OnlinePlugins = service.GetPlugins();
          this.AlreadyUploadedPlugins = new List<Plugin>();
          Plugin configFilePlugin = UploadConfigurationFile(service);
          this.alreadyUploadedPlugins.Add(configFilePlugin);

          // upload jobs
          hiveExperiment.Progress.Status = "Uploading jobs...";
          foreach (HiveJob hiveJob in hiveExperiment.HiveJobs) {
            UploadJobWithChildren(hiveExperiment.Progress, service, hiveJob, null, resourceIds, ref jobCount, totalJobCount, configFilePlugin.Id, hiveExperiment.UseLocalPlugins, hiveExperiment.Id);
          }

          if (hiveExperiment.RefreshAutomatically) hiveExperiment.StartResultPolling();
        });
      }
      finally {
        hiveExperiment.IsProgressing = false;
      }
    }

    /// <summary>
    /// Uploads the local configuration file as plugin
    /// </summary>
    private static Plugin UploadConfigurationFile(IHiveService service) {
      string exeFilePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "HeuristicLab 3.3.exe");
      string configFileName = Path.GetFileName(ConfigurationManager.OpenExeConfiguration(exeFilePath).FilePath);
      string configFilePath = ConfigurationManager.OpenExeConfiguration(exeFilePath).FilePath;

      Plugin configPlugin = new Plugin() { Name = "Configuration", IsLocal = true, Version = new Version() };
      PluginData configFile = new PluginData() { FileName = configFileName, Data = File.ReadAllBytes(configFilePath) };
      configPlugin.Id = service.AddPlugin(configPlugin, new List<PluginData> { configFile });
      return configPlugin;
    }

    /// <summary>
    /// Uploads the given job and all its child-jobs while setting the proper parentJobId values for the children
    /// </summary>
    /// <param name="parentHiveJob">shall be null if it is the root job</param>
    private void UploadJobWithChildren(IProgress progress, IHiveService service, HiveJob hiveJob, HiveJob parentHiveJob, IEnumerable<Guid> groups, ref int jobCount, int totalJobCount, Guid configPluginId, bool useLocalPlugins, Guid hiveExperimentId) {
      jobCount++;
      progress.Status = string.Format("Serializing job {0} of {1}", jobCount, totalJobCount);

      JobData jobData;
      List<IPluginDescription> plugins;

      if (hiveJob.OptimizerJob.ComputeInParallel &&
          (hiveJob.OptimizerJob.Optimizer is Optimization.Experiment || hiveJob.OptimizerJob.Optimizer is Optimization.BatchRun)) {
        hiveJob.Job.IsParentJob = true;
        hiveJob.Job.FinishWhenChildJobsFinished = true;
        hiveJob.OptimizerJob.CollectChildJobs = false; // don't collect child-jobs on slaves
        jobData = hiveJob.GetAsJobData(true, out plugins);
      } else {
        hiveJob.Job.IsParentJob = false;
        hiveJob.Job.FinishWhenChildJobsFinished = false;
        jobData = hiveJob.GetAsJobData(false, out plugins);
      }

      hiveJob.Job.PluginsNeededIds = PluginUtil.GetPluginDependencies(service, this.onlinePlugins, this.alreadyUploadedPlugins, plugins, useLocalPlugins);
      hiveJob.Job.PluginsNeededIds.Add(configPluginId);
      hiveJob.Job.HiveExperimentId = hiveExperimentId;

      progress.Status = string.Format("Uploading job {0} of {1} ({2} kb)", jobCount, totalJobCount, jobData.Data.Count() / 1024);
      progress.ProgressValue = (double)jobCount / totalJobCount;

      if (parentHiveJob != null) {
        hiveJob.Job.Id = service.AddChildJob(parentHiveJob.Job.Id, hiveJob.Job, jobData);
      } else {
        hiveJob.Job.Id = service.AddJob(hiveJob.Job, jobData, groups.ToList());
      }

      foreach (HiveJob child in hiveJob.ChildHiveJobs) {
        UploadJobWithChildren(progress, service, child, hiveJob, groups, ref jobCount, totalJobCount, configPluginId, useLocalPlugins, hiveExperimentId);
      }
    }
    #endregion

    #region Download Experiment
    public static void LoadExperiment(HiveExperiment hiveExperiment) {
      hiveExperiment.Progress = new Progress();
      try {
        hiveExperiment.IsProgressing = true;
        int totalJobCount = 0;
        IEnumerable<LightweightJob> allJobs;

        hiveExperiment.Progress.Status = "Connecting to Server...";
        // fetch all Job objects to create the full tree of HiveJob objects
        hiveExperiment.Progress.Status = "Downloading list of jobs...";
        allJobs = ServiceLocator.Instance.CallHiveService(s => s.GetLightweightExperimentJobs(hiveExperiment.Id));
        totalJobCount = allJobs.Count();

        HiveJobDownloader downloader = new HiveJobDownloader(allJobs.Select(x => x.Id));
        downloader.StartAsync();

        while (!downloader.IsFinished) {
          hiveExperiment.Progress.ProgressValue = downloader.FinishedCount / (double)totalJobCount;
          hiveExperiment.Progress.Status = string.Format("Downloading/deserializing jobs... ({0}/{1} finished)", downloader.FinishedCount, totalJobCount);
          Thread.Sleep(500);
        }
        IDictionary<Guid, HiveJob> allHiveJobs = downloader.Results;

        hiveExperiment.HiveJobs = new ItemCollection<HiveJob>(allHiveJobs.Values.Where(x => !x.Job.ParentJobId.HasValue));
        //hiveExperiment.HiveJobs = allHiveJobs[hiveExperiment.RootJobId];

        if (hiveExperiment.IsFinished()) {
          //hiveExperiment.ExecutionTime = hiveExperiment.HiveJobs.Max(.Job.DateFinished.Value - hiveExperiment.HiveJobs.Job.DateCreated.Value;
          //hiveExperiment.lastUpdateTime = hiveExperiment.HiveJob.Job.DateFinished.Value;
          hiveExperiment.ExecutionState = Core.ExecutionState.Stopped;
          //OnStopped(); // todo: stop timer
        } else {
          //hiveExperiment.ExecutionTime = hiveExperiment.HiveJobs.Job.DateCreated.HasValue ? DateTime.Now - hiveExperiment.HiveJobs.Job.DateCreated.Value : TimeSpan.Zero;
          //hiveExperiment.lastUpdateTime = DateTime.Now;
          hiveExperiment.ExecutionState = Core.ExecutionState.Started;
          //OnStarted(); // todo: start timer
        }
        hiveExperiment.UpdateTotalExecutionTime();

        // build child-job tree
        foreach (HiveJob hiveJob in hiveExperiment.HiveJobs) {
          BuildHiveJobTree(hiveJob, allJobs, allHiveJobs);
        }

        hiveExperiment.UpdateTotalExecutionTime();

        if (hiveExperiment.ExecutionState != ExecutionState.Stopped) {
          hiveExperiment.RefreshAutomatically = true;
        }
      }
      finally {
        hiveExperiment.IsProgressing = false;
      }
    }

    private static void BuildHiveJobTree(HiveJob parentHiveJob, IEnumerable<LightweightJob> allJobs, IDictionary<Guid, HiveJob> allHiveJobs) {
      IEnumerable<LightweightJob> childJobs = from job in allJobs
                                              where job.ParentJobId.HasValue && job.ParentJobId.Value == parentHiveJob.Job.Id
                                              orderby job.DateCreated ascending
                                              select job;
      foreach (LightweightJob job in childJobs) {
        HiveJob childHiveJob = allHiveJobs[job.Id];
        parentHiveJob.AddChildHiveJob(childHiveJob);
        BuildHiveJobTree(childHiveJob, allJobs, allHiveJobs);
      }
    }
    #endregion

    /// <summary>
    /// Converts a string which can contain resource names separated by ';' into an enumerable
    /// </summary>
    private static IEnumerable<string> ToResourceNameList(string resourceNames) {
      if (!string.IsNullOrEmpty(resourceNames)) {
        return resourceNames.Split(';');
      } else {
        return new List<string>();
      }
    }

    public static OptimizerJob LoadOptimizerJob(Guid jobId) {
      JobData jobData = ServiceLocator.Instance.CallHiveService(s => s.GetJobData(jobId));
      try {
        return PersistenceUtil.Deserialize<OptimizerJob>(jobData.Data);
      }
      catch {
        return null;
      }
    }
  }
}