using HeuristicLab.Clients.Hive.WebJobManager.Services;
using HeuristicLab.Optimization;
using Microsoft.AspNet.SignalR;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace HeuristicLab.Clients.Hive.WebJobManager
{
/// <summary>
/// SignalR Hub for displaying the progress when uploading a Job.
/// Also used to change child distribution and priority for inner tasks.
/// </summary>
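/// <remarks>
/// A minimal connection sketch using the Microsoft.AspNet.SignalR.Client package;
/// the URL, the user variable and the console output are assumptions for illustration,
/// not taken from this project's configuration:
/// <code>
/// // inside an async method; userGuid is the id of the logged-in web user (assumed available)
/// var query = new Dictionary&lt;string, string&gt; { { "userid", userGuid.ToString() } };
/// var connection = new HubConnection("http://localhost:5000/signalr", query);
/// var hub = connection.CreateHubProxy("ProgressHub");
/// // runHub pushes the status text plus a percentage, HandleMessage only the status text
/// hub.On&lt;string&gt;("processMessage", status =&gt; Console.WriteLine(status));
/// await connection.Start();
/// await hub.Invoke("HandleMessage", "hello");
/// </code>
/// </remarks>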
public class ProgressHub : Hub
{
private WebLoginService weblog; // singleton login service that maps web users to their services
private Guid userId; // user id parsed from the SignalR connection's query string
private FileOpeningService fileopener; // per-user service that holds the job currently being uploaded
/// <summary>
/// Resolves the current user from the connection's query string and loads that user's FileOpeningService.
/// </summary>
private void loader()
{
weblog = WebLoginService.Instance;
string uid = Context.QueryString["userid"];
Guid parsed;
if (string.IsNullOrEmpty(uid) || !Guid.TryParse(uid, out parsed) || parsed == Guid.Empty)
{
userId = Guid.Empty;
}
else {
userId = parsed;
fileopener = weblog.getFileOpener(userId);
}
}
/// <summary>
/// Handles the first message from the client and subscribes this hub to the job's upload progress.
/// </summary>
/// <param name="receivedString">Client message</param>
public void HandleMessage(string receivedString)
{
loader();
Clients.Caller.processMessage("Connection Established");
if (fileopener != null) // loader() leaves this null when no valid userid was supplied
fileopener.Job.Progress.StatusChanged += runHub;
}
/// <summary>
/// Changes the name and adds a resource for the job that is about to be created.
/// </summary>
/// <param name="name">Job name</param>
/// <param name="resource">Resource name appended to the job's ResourceNames</param>
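/// <example>
/// A sketch with an assumed resource name: <c>ChangeNameResource("My experiment", "HEAL")</c>
/// renames the pending job to "My experiment" and appends "/HEAL" to its ResourceNames.
/// </example>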
public void ChangeNameResource(string name, string resource)
{
loader();
if (name != null)
fileopener.Job.Job.Name = name;
if(resource != null && resource != "")
{
fileopener.Job.Job.ResourceNames += "/" + resource;
}
}
/// <summary>
/// Toggles 'distribute child tasks' (ComputeInParallel) for the selected task.
/// </summary>
/// <param name="arr">2-dimensional int array.
/// The first dimension is the depth (arr.Length == 4 means the item is 4 nodes deep).
/// The second contains experiment or batch info: arr[i][0] is the index of the experiment sub-task,
/// arr[i][1] the index of the batch run sub-task.</param>
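/// <example>
/// A sketch with assumed indices: if the root task and its first child are experiments,
/// the path below toggles ComputeInParallel on the second sub-task of that first child.
/// It is invoked from the client, e.g. <c>hub.Invoke("ToggleChild", (object)arr)</c>.
/// <code>
/// int[][] arr = { new[] { 0, 0 }, new[] { 1, 0 } };
/// </code>
/// </example>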
public void ToggleChild(int[][] arr)
{
loader();
HiveTask current = fileopener.Job.HiveTasks.ToList()[0];
if (arr.Length == 0)
{//check if upper job
current.ItemTask.ComputeInParallel = !current.ItemTask.ComputeInParallel;
}
else {
for (var i = 0; i < arr.Length; i++)
{
//loop for depth
if (i == arr.Length - 1)//end of depth loop, right current is selected
{
if (current.ItemTask.Item is BatchRun)
{
current.ChildHiveTasks[arr[i][1]].ItemTask.ComputeInParallel = !current.ChildHiveTasks[arr[i][1]].ItemTask.ComputeInParallel;
}
else if (current.ItemTask.Item is Experiment)
{
current.ChildHiveTasks[arr[i][0]].ItemTask.ComputeInParallel = !current.ChildHiveTasks[arr[i][0]].ItemTask.ComputeInParallel;
}
}
else {//not deep enough, select right path
if (current.ItemTask.Item is BatchRun)
current = current.ChildHiveTasks[arr[i][1]]; // select right batch
else if (current.ItemTask.Item is Experiment)
current = current.ChildHiveTasks[arr[i][0]]; // select right sub task from experiment
}
}
}
}
/// <summary>
/// Changes the priority of the selected task.
/// </summary>
/// <param name="arr">2-dimensional int array.
/// The first dimension is the depth (arr.Length == 4 means the item is 4 nodes deep).
/// The second contains experiment or batch info: arr[i][0] is the index of the experiment sub-task,
/// arr[i][1] the index of the batch run sub-task.</param>
/// <param name="prior">Selected priority</param>
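/// <example>
/// A sketch: an empty path targets the root task itself, e.g.
/// <c>ChangePriority(new int[0][], 2)</c> sets the root task's priority to 2
/// (the priority value is an assumption for illustration).
/// </example>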
public void ChangePriority(int[][] arr, int prior)
{
loader();
HiveTask current = fileopener.Job.HiveTasks.ToList()[0];
if (arr.Length == 0)
{//check if upper job
current.Task.Priority = prior;
}
else {
for (var i = 0; i < arr.Length; i++)
{//loop for depth
if (i == arr.Length - 1)
{//Right depth reached, change priority for current
if (current.ItemTask.Item is BatchRun)
{
current.ChildHiveTasks[arr[i][1]].Task.Priority = prior;
}
else if (current.ItemTask.Item is Experiment)
{
current.ChildHiveTasks[arr[i][0]].Task.Priority = prior;
}
}
else {//not deep enough, choose right path
if (current.ItemTask.Item is BatchRun)
current = current.ChildHiveTasks[arr[i][1]]; // select right batch
else if (current.ItemTask.Item is Experiment)
current = current.ChildHiveTasks[arr[i][0]]; // select right sub task from experiment
}
}
}
}
/// <summary>
/// Handles the 'StatusChanged' event of the currently uploading job's progress
/// and pushes the status text and a progress percentage to the caller.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
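/// <example>
/// While the tasks upload one by one, the percentage is interpolated from ProgressValue:
/// a ProgressValue of 0.5, for example, maps to 50 + 40 * 0.5 = 70.
/// </example>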
public void runHub(object sender, EventArgs e)
{
loader();
int value = 0;
switch (fileopener.Job.Progress.Status)
{
case "Connecting to server...":
value = 0;
break;
case "Uploading Job...":
value = 10;
break;
case "Uploading plugins...":
value = 30;
break;
case "Uploading tasks...":
value = 50;
break;
case "Upload finished":
value = 100;
break;
default: // tasks are uploading one by one: scale ProgressValue (0..1) into the 50-90 range
value = (int)(50 + (40 * fileopener.Job.Progress.ProgressValue));
break;
}
//send info to client
Clients.Caller.processMessage(fileopener.Job.Progress.Status, value);
}
}
}