Free cookie consent management tool by TermsFeed Policy Generator

source: branches/MemPRAlgorithm/HeuristicLab.ParallelEngine/3.3/ParallelEngine.cs @ 15835

Last change on this file since 15835 was 14185, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 5.5 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Threading;
25using System.Threading.Tasks;
26using HeuristicLab.Common;
27using HeuristicLab.Core;
28using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
29
namespace HeuristicLab.ParallelEngine {
  /// <summary>
  /// Represents an engine that executes its steps in parallel (if possible) using multiple threads.
  /// This engine is suitable for parallel processing on shared memory systems which provide multiple cores.
  /// </summary>
  [StorableClass]
  [Item("Parallel Engine", "Engine for parallel execution of algorithms using multiple threads (suitable for shared memory systems with multiple cores).")]
  public class ParallelEngine : Engine {
    private CancellationToken cancellationToken;
    private ParallelOptions parallelOptions;

    // Maximum number of concurrent tasks handed to Parallel.ForEach;
    // -1 is the ParallelOptions value meaning "no limit".
    [Storable(DefaultValue = -1)]
    private int degreeOfParallelism;
    public int DegreeOfParallelism {
      get { return degreeOfParallelism; }
      set {
        if (degreeOfParallelism != value) {
          degreeOfParallelism = value;
          OnDegreeOfParallelismChanged();
        }
      }
    }

    [StorableConstructor]
    protected ParallelEngine(bool deserializing) : base(deserializing) { }
    protected ParallelEngine(ParallelEngine original, Cloner cloner)
      : base(original, cloner) {
      this.DegreeOfParallelism = original.DegreeOfParallelism;
    }
    public ParallelEngine()
      : base() {
      this.degreeOfParallelism = -1;
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new ParallelEngine(this, cloner);
    }

    /// <summary>Raised whenever <see cref="DegreeOfParallelism"/> is set to a different value.</summary>
    public event EventHandler DegreeOfParallelismChanged;
    protected void OnDegreeOfParallelismChanged() {
      // Copy to a local so a concurrent unsubscribe cannot null the field between check and call.
      var handler = DegreeOfParallelismChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }


    /// <summary>
    /// Entry point called by the base engine: configures the parallel options
    /// from <see cref="DegreeOfParallelism"/> and processes the execution stack.
    /// </summary>
    protected override void Run(CancellationToken cancellationToken) {
      this.cancellationToken = cancellationToken;
      parallelOptions = new ParallelOptions();
      parallelOptions.MaxDegreeOfParallelism = DegreeOfParallelism;
      parallelOptions.CancellationToken = cancellationToken;
      Run(ExecutionStack);
    }

    // Worker that drains one execution stack. The object parameter (instead of
    // Stack<IOperation>) lets this method be passed directly to Parallel.ForEach.
    private void Run(object state) {
      Stack<IOperation> executionStack = (Stack<IOperation>)state;
      IOperation next;
      OperationCollection coll;
      IAtomicOperation operation;

      while (executionStack.Count > 0) {
        cancellationToken.ThrowIfCancellationRequested();

        next = executionStack.Pop();
        if (next is OperationCollection) {
          coll = (OperationCollection)next;
          if (coll.Parallel) {
            // Fan out: give every child operation its own stack and run them concurrently.
            Stack<IOperation>[] stacks = new Stack<IOperation>[coll.Count];
            for (int i = 0; i < coll.Count; i++) {
              stacks[i] = new Stack<IOperation>();
              stacks[i].Push(coll[i]);
            }
            try {
              Parallel.ForEach(stacks, parallelOptions, Run);
            }
            catch (OperationCanceledException) {
              // Cancellation: collect all unfinished work back onto this stack
              // so execution can be resumed later from exactly where it stopped.
              OperationCollection remaining = new OperationCollection() { Parallel = true };
              for (int i = 0; i < stacks.Length; i++) {
                if (stacks[i].Count == 1) {
                  remaining.Add(stacks[i].Pop());
                } else if (stacks[i].Count > 1) {
                  OperationCollection ops = new OperationCollection();
                  while (stacks[i].Count > 0)
                    ops.Add(stacks[i].Pop());
                  remaining.Add(ops);
                }
              }
              if (remaining.Count > 0) executionStack.Push(remaining);
              // Rethrow with "throw;" to preserve the original stack trace
              // ("throw ex;" would reset it — CA2200).
              throw;
            }
          } else {
            // Sequential collection: push in reverse so coll[0] is executed first.
            for (int i = coll.Count - 1; i >= 0; i--)
              if (coll[i] != null) executionStack.Push(coll[i]);
          }
        } else if (next is IAtomicOperation) {
          operation = (IAtomicOperation)next;
          try {
            next = operation.Operator.Execute((IExecutionContext)operation, cancellationToken);
          }
          catch (Exception ex) {
            // Re-push the failed operation so execution can be retried/resumed.
            executionStack.Push(operation);
            // "throw;" preserves the cancellation exception's stack trace (was "throw ex;").
            if (ex is OperationCanceledException) throw;
            else throw new OperatorExecutionException(operation.Operator, ex);
          }
          if (next != null) executionStack.Push(next);

          if (operation.Operator.Breakpoint) {
            string message = string.Format("Breakpoint: {0}", operation.Operator.Name != string.Empty ? operation.Operator.Name : operation.Operator.ItemName);
            Log.LogMessage(message);
            throw new OperationCanceledException(message);
          }
        }
      }
    }
  }
}
Note: See TracBrowser for help on using the repository browser.