#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

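// Conditional-compilation switches used by the disabled graph-mode code further below:
// EXPORT_GRAPH dumps the TensorFlow graph to disk, LOG_CONSOLE/LOG_FILE enable logging of
// costs, weights and gradients during optimization.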
//#define EXPORT_GRAPH
//#define LOG_CONSOLE
//#define LOG_FILE

using System;
using System.Collections.Generic;
#if LOG_CONSOLE
using System.Diagnostics;
#endif
#if LOG_FILE
using System.Globalization;
using System.IO;
#endif
using System.Linq;
using System.Threading;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HEAL.Attic;
using Tensorflow;
using Tensorflow.NumPy;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;
using DoubleVector = MathNet.Numerics.LinearAlgebra.Vector<double>;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [StorableType("63944BF6-62E5-4BE4-974C-D30AD8770F99")]
  [Item("TensorFlowConstantOptimizationEvaluator", "Optimizes the constants (and optionally the variable weights) of a symbolic expression tree via gradient descent using TensorFlow.")]
  public class TensorFlowConstantOptimizationEvaluator : SymbolicRegressionConstantOptimizationEvaluator {
    private const string MaximumIterationsName = "MaximumIterations";
    private const string LearningRateName = "LearningRate";

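    // All tensors are built with single precision by default; switch to tf.float64 if
    // double precision is required (at the cost of additional memory and runtime).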
    //private static readonly TF_DataType DataType = tf.float64;
    private static readonly TF_DataType DataType = tf.float32;

    #region Parameter Properties
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumIterationsName]; }
    }
    public IFixedValueParameter<DoubleValue> LearningRateParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[LearningRateName]; }
    }
    #endregion

    #region Properties
    public int ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value.Value; }
    }
    public double LearningRate {
      get { return LearningRateParameter.Value.Value; }
    }
    #endregion

    public TensorFlowConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumIterationsName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(LearningRateName, "The learning rate of the gradient-descent (Adam) optimizer.", new DoubleValue(0.001)));
    }

    protected TensorFlowConstantOptimizationEvaluator(TensorFlowConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new TensorFlowConstantOptimizationEvaluator(this, cloner);
    }

    [StorableConstructor]
    protected TensorFlowConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }

    protected override ISymbolicExpressionTree OptimizeConstants(
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows,
      CancellationToken cancellationToken = default(CancellationToken), EvaluationsCounter counter = null) {
      return OptimizeTree(tree,
        problemData, rows,
        ApplyLinearScalingParameter.ActualValue.Value, UpdateVariableWeights,
        ConstantOptimizationIterations, LearningRate,
        cancellationToken);
    }

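    // Optimizes all numeric parameters of the tree by gradient descent: the tree is converted
    // into a TensorFlow eager computation, the mean squared error against the target variable
    // is minimized with Adam for at most maxIterations steps, and the optimized values are
    // written back into a clone of the tree. If the tree cannot be converted, an unmodified
    // clone is returned.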
    public static ISymbolicExpressionTree OptimizeTree(ISymbolicExpressionTree tree,
      IRegressionProblemData problemData, IEnumerable<int> rows,
      bool applyLinearScaling, bool updateVariableWeights, int maxIterations, double learningRate,
      CancellationToken cancellationToken = default(CancellationToken), IProgress<double> progress = null) {

      const bool eager = true;

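      // Convert the tree into tensors: one input tensor per feature, a target tensor, and
      // ResourceVariables for the trainable parameters of each tree node. If any symbol is
      // unsupported, the conversion fails and the tree is returned unchanged.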
      bool prepared = TreeToTensorConverter.TryPrepareTree(
        tree,
        problemData, rows.ToList(),
        updateVariableWeights, applyLinearScaling,
        eager,
        out Dictionary<string, Tensor> inputFeatures, out Tensor target,
        out Dictionary<ISymbolicExpressionTreeNode, ResourceVariable[]> variables);
      if (!prepared)
        return (ISymbolicExpressionTree)tree.Clone();

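      // Adam optimizer over all trainable parameters; the keras API takes the learning rate as float.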
      var optimizer = keras.optimizers.Adam((float)learningRate);

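      // One Adam step per iteration: record the forward evaluation of the tree on a gradient
      // tape, compute the loss, and apply the gradients to all trainable variables.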
      for (int i = 0; i < maxIterations; i++) {
        if (cancellationToken.IsCancellationRequested) break;

        using var tape = tf.GradientTape();

        bool success = TreeToTensorConverter.TryEvaluate(
          tree,
          inputFeatures, variables,
          updateVariableWeights, applyLinearScaling,
          eager,
          out Tensor prediction);
        if (!success)
          return (ISymbolicExpressionTree)tree.Clone();

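        // Mean squared error between the target values and the tree's predictions.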
        var loss = tf.reduce_mean(tf.square(target - prediction));

        progress?.Report(loss.ToArray<float>()[0]);

        var variablesList = variables.Values.SelectMany(x => x).ToList();
        var gradients = tape.gradient(loss, variablesList);

        optimizer.apply_gradients(zip(gradients, variablesList));
      }

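      // Clone the tree and write the optimized parameter values (converted back to double)
      // into the corresponding cloned nodes.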
      var cloner = new Cloner();
      var newTree = cloner.Clone(tree);
      var newConstants = variables.ToDictionary(
        kvp => (ISymbolicExpressionTreeNode)cloner.GetClone(kvp.Key),
        kvp => kvp.Value.Select(x => (double)(x.numpy().ToArray<float>()[0])).ToArray()
      );
      UpdateConstants(newTree, newConstants);

      return newTree;

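      // What follows is an older graph-mode/session-based implementation (placeholders, feed
      // dictionaries, tf.train optimizers), kept commented out for reference; it is controlled
      // by the EXPORT_GRAPH and LOG_* symbols defined at the top of this file.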
      // //int numRows = rows.Count();

      // var variableLengths = problemData.AllowedInputVariables.ToDictionary(
      //   var => var,
      //   var => {
      //     if (problemData.Dataset.VariableHasType<double>(var)) return 1;
      //     if (problemData.Dataset.VariableHasType<DoubleVector>(var)) return problemData.Dataset.GetDoubleVectorValue(var, 0).Count;
      //     throw new NotSupportedException($"Type of variable {var} is not supported.");
      //   });

      // var variablesDict = problemData.AllowedInputVariables.ToDictionary(
      //   var => var,
      //   var => {
      //     if (problemData.Dataset.VariableHasType<double>(var)) {
      //       var data = problemData.Dataset.GetDoubleValues(var, rows).Select(x => (float)x).ToArray();
      //       return tf.convert_to_tensor(np.array(data).reshape(new Shape(numRows, 1)), DataType);
      //     } else if (problemData.Dataset.VariableHasType<DoubleVector>(var)) {
      //       var data = problemData.Dataset.GetDoubleVectorValues(var, rows).SelectMany(x => x.Select(y => (float)y)).ToArray();
      //       return tf.convert_to_tensor(np.array(data).reshape(new Shape(numRows, -1)), DataType);
      //     } else throw new NotSupportedException($"Type of the variable is not supported: {var}");
      //   }
      // );

      // using var tape = tf.GradientTape(persistent: true);

      // bool success = TreeToTensorConverter.TryEvaluateEager(tree,
      //   numRows, variablesDict,
      //   updateVariableWeights, applyLinearScaling,
      //   out Tensor prediction,
      //   out Dictionary<Tensor, string> parameters, out List<ResourceVariable> variables);

      // //bool success = TreeToTensorConverter.TryConvert(tree,
      // //  numRows, variableLengths,
      // //  updateVariableWeights, applyLinearScaling,
      // //  out Tensor prediction,
      // //  out Dictionary<Tensor, string> parameters, out List<Tensor> variables);

      // if (!success)
      //   return (ISymbolicExpressionTree)tree.Clone();

      // //var target = tf.placeholder(DataType, new Shape(numRows), name: problemData.TargetVariable);
      // var targetData = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows).Select(x => (float)x).ToArray();
      // var target = tf.convert_to_tensor(np.array(targetData).reshape(new Shape(numRows)), DataType);
      // // MSE
      // var cost = tf.reduce_sum(tf.square(prediction - target));

      // tape.watch(cost);

      // //var optimizer = tf.train.AdamOptimizer((float)learningRate);
      // //var optimizer = tf.train.AdamOptimizer(tf.constant(learningRate, DataType));
      // //var optimizer = tf.train.GradientDescentOptimizer((float)learningRate);
      // //var optimizer = tf.train.GradientDescentOptimizer(tf.constant(learningRate, DataType));
      // //var optimizer = tf.train.GradientDescentOptimizer((float)learningRate);
      // //var optimizer = tf.train.AdamOptimizer((float)learningRate);
      // //var optimizationOperation = optimizer.minimize(cost);
      // var optimizer = keras.optimizers.Adam((float)learningRate);

      //#if EXPORT_GRAPH
      // //https://github.com/SciSharp/TensorFlow.NET/wiki/Debugging
      // tf.train.export_meta_graph(@"C:\temp\TFboard\graph.meta", as_text: false,
      //   clear_devices: true, clear_extraneous_savers: false, strip_default_attrs: true);
      //#endif

      // //// features as feed items
      // //var variablesFeed = new Hashtable();
      // //foreach (var kvp in parameters) {
      // //  var variable = kvp.Key;
      // //  var variableName = kvp.Value;
      // //  if (problemData.Dataset.VariableHasType<double>(variableName)) {
      // //    var data = problemData.Dataset.GetDoubleValues(variableName, rows).Select(x => (float)x).ToArray();
      // //    variablesFeed.Add(variable, np.array(data).reshape(new Shape(numRows, 1)));
      // //  } else if (problemData.Dataset.VariableHasType<DoubleVector>(variableName)) {
      // //    var data = problemData.Dataset.GetDoubleVectorValues(variableName, rows).SelectMany(x => x.Select(y => (float)y)).ToArray();
      // //    variablesFeed.Add(variable, np.array(data).reshape(new Shape(numRows, -1)));
      // //  } else
      // //    throw new NotSupportedException($"Type of the variable is not supported: {variableName}");
      // //}
      // //var targetData = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows).Select(x => (float)x).ToArray();
      // //variablesFeed.Add(target, np.array(targetData));

      // List<NDArray> constants;
      // //using (var session = tf.Session()) {

      //#if LOG_FILE
      // var directoryName = $"C:\\temp\\TFboard\\logdir\\manual_{DateTime.Now.ToString("yyyyMMddHHmmss")}_{maxIterations}_{learningRate.ToString(CultureInfo.InvariantCulture)}";
      // Directory.CreateDirectory(directoryName);
      // var costsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Costs.csv")));
      // var weightsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Weights.csv")));
      // var gradientsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Gradients.csv")));
      //#endif

      // //session.run(tf.global_variables_initializer());

      //#if LOG_CONSOLE || LOG_FILE
      // var gradients = optimizer.compute_gradients(cost);
      //#endif

      // //var vars = variables.Select(v => session.run(v, variablesFeed)[0].ToArray<float>()[0]).ToList();
      // //var gradient = optimizer.compute_gradients(cost)
      // //  .Where(g => g.Item1 != null)
      // //  //.Select(g => session.run(g.Item1, variablesFeed)[0].GetValue<float>(0)).
      // //  .Select(g => session.run(g.Item1, variablesFeed)[0].ToArray<float>()[0])
      // //  .ToList();

      // //var gradientPrediction = optimizer.compute_gradients(prediction)
      // //  .Where(g => g.Item1 != null)
      // //  .Select(g => session.run(g.Item1, variablesFeed)[0].ToArray<float>()[0])
      // //  .ToList();

      // //progress?.Report(session.run(cost, variablesFeed)[0].ToArray<float>()[0]);
      // progress?.Report(cost.ToArray<float>()[0]);

      //#if LOG_CONSOLE
      // Trace.WriteLine("Costs:");
      // Trace.WriteLine($"MSE: {session.run(cost, variablesFeed)[0].ToString(true)}");

      // Trace.WriteLine("Weights:");
      // foreach (var v in variables) {
      //   Trace.WriteLine($"{v.name}: {session.run(v).ToString(true)}");
      // }

      // Trace.WriteLine("Gradients:");
      // foreach (var t in gradients) {
      //   Trace.WriteLine($"{t.Item2.name}: {session.run(t.Item1, variablesFeed)[0].ToString(true)}");
      // }
      //#endif

      //#if LOG_FILE
      // costsWriter.WriteLine("MSE");
      // costsWriter.WriteLine(session.run(cost, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture));

      // weightsWriter.WriteLine(string.Join(";", variables.Select(v => v.name)));
      // weightsWriter.WriteLine(string.Join(";", variables.Select(v => session.run(v).ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));

      // gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => t.Item2.Name)));
      // gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => session.run(t.Item1, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
      //#endif

      // for (int i = 0; i < maxIterations; i++) {
      //   if (cancellationToken.IsCancellationRequested)
      //     break;

      //   var gradients = tape.gradient(cost, variables);
      //   //optimizer.apply_gradients(gradients.Zip(variables, Tuple.Create<Tensor, IVariableV1>).ToArray());
      //   optimizer.apply_gradients(zip(gradients, variables));

      //   //session.run(optimizationOperation, variablesFeed);

      //   progress?.Report(cost.ToArray<float>()[0]);
      //   //progress?.Report(session.run(cost, variablesFeed)[0].ToArray<float>()[0]);

      //#if LOG_CONSOLE
      //   Trace.WriteLine("Costs:");
      //   Trace.WriteLine($"MSE: {session.run(cost, variablesFeed)[0].ToString(true)}");

      //   Trace.WriteLine("Weights:");
      //   foreach (var v in variables) {
      //     Trace.WriteLine($"{v.name}: {session.run(v).ToString(true)}");
      //   }

      //   Trace.WriteLine("Gradients:");
      //   foreach (var t in gradients) {
      //     Trace.WriteLine($"{t.Item2.name}: {session.run(t.Item1, variablesFeed)[0].ToString(true)}");
      //   }
      //#endif

      //#if LOG_FILE
      //   costsWriter.WriteLine(session.run(cost, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture));
      //   weightsWriter.WriteLine(string.Join(";", variables.Select(v => session.run(v).ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
      //   gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => session.run(t.Item1, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
      //#endif
      // }

      //#if LOG_FILE
      // costsWriter.Close();
      // weightsWriter.Close();
      // gradientsWriter.Close();
      //#endif
      // //constants = variables.Select(v => session.run(v)).ToList();
      // constants = variables.Select(v => v.numpy()).ToList();
      // //}

      // if (applyLinearScaling)
      //   constants = constants.Skip(2).ToList();
      // var newTree = (ISymbolicExpressionTree)tree.Clone();
      // UpdateConstants(newTree, constants, updateVariableWeights);

      // return newTree;
    }

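    // Writes the optimized values back into the tree nodes: scalar constants, variable
    // weights, and the weight vectors of factor variables.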
    private static void UpdateConstants(ISymbolicExpressionTree tree, Dictionary<ISymbolicExpressionTreeNode, double[]> constants) {
      foreach (var kvp in constants) {
        var node = kvp.Key;
        var value = kvp.Value;

        switch (node) {
          case ConstantTreeNode constantTreeNode:
            constantTreeNode.Value = value[0];
            break;
          case VariableTreeNodeBase variableTreeNodeBase:
            variableTreeNodeBase.Weight = value[0];
            break;
          case FactorVariableTreeNode factorVarTreeNode: {
            for (int i = 0; i < factorVarTreeNode.Weights.Length; i++) {
              factorVarTreeNode.Weights[i] = value[i];
            }
            break;
          }
        }
      }
    }

    //private static void UpdateConstants(ISymbolicExpressionTree tree, IList<NDArray> constants, bool updateVariableWeights) {
    //  int i = 0;
    //  foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
    //    if (node is ConstantTreeNode constantTreeNode) {
    //      constantTreeNode.Value = constants[i++].ToArray<float>()[0];
    //    } else if (node is VariableTreeNodeBase variableTreeNodeBase && updateVariableWeights) {
    //      variableTreeNodeBase.Weight = constants[i++].ToArray<float>()[0];
    //    } else if (node is FactorVariableTreeNode factorVarTreeNode && updateVariableWeights) {
    //      for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
    //        factorVarTreeNode.Weights[j] = constants[i++].ToArray<float>()[0];
    //    }
    //  }
    //}

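    // A tree is eligible for TensorFlow-based constant optimization only if all of its
    // symbols are supported by the TreeToTensorConverter.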
    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return TreeToTensorConverter.IsCompatible(tree);
    }
  }
}