
Changeset 18240


Timestamp: 03/23/22 13:10:34
Author: pfleck
Message: #3040 smaller fixes and some code cleanup
Location: branches/3040_VectorBasedGP
Files: 3 edited

  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Views/3.4/InteractiveSymbolicRegressionSolutionSimplifierView.cs

    r18239 → r18240

    @@ -97,6 +97,5 @@
           return TensorFlowConstantOptimizationEvaluator.OptimizeTree(tree, regressionProblemData,
             regressionProblemData.TrainingIndices,
    -        //new int[]{ 0, 1 },
    -        applyLinearScaling: false, updateVariableWeights: true, maxIterations: maxIterations, learningRate: learningRate,
    +        applyLinearScaling: true, updateVariableWeights: true, maxIterations: maxIterations, learningRate: learningRate,
             cancellationToken: cancellationToken,
             progress: new SynchronousProgress<double>(cost => {

    @@ -126,6 +125,5 @@
               regressionProblemData,
               regressionProblemData.TrainingIndices,
    -          //new int[] { 0, 1 },
    -          applyLinearScaling: false, maxIterations: constOptIterations, updateVariableWeights: true,
    +          applyLinearScaling: true, maxIterations: constOptIterations, updateVariableWeights: true,
               cancellationToken: cancellationToken, iterationCallback: (args, func, obj) => {
                 double newProgressValue = progress.ProgressValue + (1.0 / (constOptIterations + 2) / maxRepetitions); // (constOptIterations + 2) iterations are reported
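    Note on this change: `applyLinearScaling: true` makes the evaluator fit an offset and a scale along with the tree's constants. The converter adds two extra variables, `alpha` and `beta` (see the `StartSymbol` branch of `TreeToTensorConverter.cs` further down in this changeset). A minimal sketch of the effect, assuming TensorFlow.NET's eager API; the constant tensor is a hypothetical stand-in for the evaluated tree, not code from this changeset:

    using System;
    using Tensorflow;
    using Tensorflow.NumPy;
    using static Tensorflow.Binding;

    public static class LinearScalingSketch {
      public static void Main() {
        // Hypothetical stand-in for the evaluated tree's output f(x), shape (n, 1).
        var prediction = tf.constant(new float[,] { { 2.0f }, { 3.0f } });
        // With linear scaling enabled the converter adds these two variables.
        var alpha = tf.Variable(np.array(1.0f).reshape(new Shape(1, 1)), name: "alpha");
        var beta = tf.Variable(np.array(0.0f).reshape(new Shape(1, 1)), name: "beta");
        // The model effectively becomes beta + alpha * f(x), so the optimizer
        // tunes offset and scale together with the tree's constants.
        var scaled = beta + alpha * prediction;
        Console.WriteLine(scaled.numpy());
      }
    }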
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/TensorFlowConstantOptimizationEvaluator.cs

    r18239 → r18240

    @@ -25,4 +25,5 @@
     
     using System;
    +using System.Collections;
     using System.Collections.Generic;
     #if LOG_CONSOLE
    @@ -55,5 +56,5 @@
     
         //private static readonly TF_DataType DataType = tf.float64;
    -    private static readonly TF_DataType DataType = tf.float32;
    +    //private static readonly TF_DataType DataType = tf.float32;
     
         #region Parameter Properties
    @@ -108,7 +109,24 @@
           const bool eager = true;
     
    -      bool prepared = TreeToTensorConverter.TryPrepareTree(
    +#if LOG_FILE
    +      var directoryName = $"C:\\temp\\TFboard\\logdir\\TF_{DateTime.Now.ToString("yyyyMMddHHmmss")}_{maxIterations}_{learningRate.ToString(CultureInfo.InvariantCulture)}";
    +      Directory.CreateDirectory(directoryName);
    +      using var predictionTargetLossWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "PredictionTargetLos.csv")));
    +      using var weightsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Weights.csv")));
    +      using var treeGradsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "TreeGrads.csv")));
    +      using var lossGradsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "LossGrads.csv")));
    +
    +      predictionTargetLossWriter.WriteLine(string.Join(";", "Prediction", "Target", "Loss"));
    +      weightsWriter.WriteLine(string.Join(";", Enumerable.Range(0, 4).Select(i => $"w_{i}")));
    +      treeGradsWriter.WriteLine(string.Join(";", Enumerable.Range(0, 4).Select(i => $"Tg_{i}")));
    +      lossGradsWriter.WriteLine(string.Join(";", Enumerable.Range(0, 4).Select(i => $"Lg_{i}")));
    +#endif
    +
    +      //foreach (var row in rows) {
    +
    +      bool prepared = TreeToTensorConverter.TryPrepareTree(
             tree,
             problemData, rows.ToList(),
    +        //problemData, new List<int>(){ row },
             updateVariableWeights, applyLinearScaling,
             eager,
    @@ -119,12 +137,16 @@
     
           var optimizer = keras.optimizers.Adam((float)learningRate);
    -      
    +
           for (int i = 0; i < maxIterations; i++) {
             if (cancellationToken.IsCancellationRequested) break;
     
    -        using var tape = tf.GradientTape();
    +#if LOG_FILE || LOG_CONSOLE
    +        using var tape = tf.GradientTape(persistent: true);
    +#else
    +        using var tape = tf.GradientTape(persistent: false);
    +#endif
     
             bool success = TreeToTensorConverter.TryEvaluate(
    -          tree, 
    +          tree,
               inputFeatures, variables,
               updateVariableWeights, applyLinearScaling,
    @@ -137,11 +159,22 @@
     
             progress?.Report(loss.ToArray<float>()[0]);
    -
    +
             var variablesList = variables.Values.SelectMany(x => x).ToList();
             var gradients = tape.gradient(loss, variablesList);
    -        
    +
    +#if LOG_FILE
    +        predictionTargetLossWriter.WriteLine(string.Join(";", new[] { prediction.ToArray<float>()[0], target.ToArray<float>()[0], loss.ToArray<float>()[0] }));
    +        weightsWriter.WriteLine(string.Join(";", variablesList.Select(v => v.numpy().ToArray<float>()[0])));
    +        treeGradsWriter.WriteLine(string.Join(";", tape.gradient(prediction, variablesList).Select(t => t.ToArray<float>()[0])));
    +        lossGradsWriter.WriteLine(string.Join(";", tape.gradient(loss, variablesList).Select(t => t.ToArray<float>()[0])));
    +#endif
    +
    +
    +        //break;
    +
             optimizer.apply_gradients(zip(gradients, variablesList));
           }
    -      
    +      //}
    +
           var cloner = new Cloner();
           var newTree = cloner.Clone(tree);
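    The hunks above replace the old graph-mode experiments with a plain eager loop: record the forward pass on a gradient tape, query the gradients of the loss with respect to all weights, and let Adam apply them via `zip`. A condensed, self-contained sketch of that pattern, assuming TensorFlow.NET's eager API; the toy linear model and all names are illustrative, not the project's converter:

    using System;
    using System.Collections.Generic;
    using Tensorflow;
    using static Tensorflow.Binding;
    using static Tensorflow.KerasApi;

    public static class EagerLoopSketch {
      public static void Main() {
        // Toy data for y = 2x + 1; the two variables stand in for tree weights.
        var x = tf.constant(new float[] { 0f, 1f, 2f, 3f });
        var y = tf.constant(new float[] { 1f, 3f, 5f, 7f });
        var w = tf.Variable(0f, name: "w");
        var b = tf.Variable(0f, name: "b");
        var variablesList = new List<ResourceVariable> { w, b };
        var optimizer = keras.optimizers.Adam(0.1f);

        for (int i = 0; i < 500; i++) {
          using var tape = tf.GradientTape(persistent: false); // one gradient query per step
          var prediction = w * x + b;                          // stand-in for TryEvaluate's output
          var loss = tf.reduce_sum(tf.square(prediction - y)); // sum of squares, as in the evaluator
          var gradients = tape.gradient(loss, variablesList);
          optimizer.apply_gradients(zip(gradients, variablesList));
        }
        Console.WriteLine($"w={w.numpy()} b={b.numpy()}"); // approaches w=2, b=1
      }
    }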
    @@ -153,77 +186,49 @@
     
     
    +
    +
    +      //var numRows = rows.Count();
    +
    +      //var variablesFeed = new Hashtable();
    +      //foreach (var kvp in inputFeatures) {
    +      //  var variableName = kvp.Key;
    +      //  var variablePlaceholder = kvp.Value;
    +      //  if (problemData.Dataset.VariableHasType<double>(variableName)) {
    +      //    var data = problemData.Dataset.GetDoubleValues(variableName, rows).Select(x => (float)x).ToArray();
    +      //    variablesFeed.Add(variablePlaceholder, np.array(data).reshape(new Shape(numRows, 1)));
    +      //  } else if (problemData.Dataset.VariableHasType<DoubleVector>(variableName)) {
    +      //    var data = problemData.Dataset.GetDoubleVectorValues(variableName, rows).SelectMany(x => x.Select(y => (float)y)).ToArray();
    +      //    variablesFeed.Add(variablePlaceholder, np.array(data).reshape(new Shape(numRows, -1)));
    +      //  } else
    +      //    throw new NotSupportedException($"Type of the variable is not supported: {variableName}");
    +      //}
    +      //var targetData = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows).Select(x => (float)x).ToArray();
    +      //variablesFeed.Add(target, np.array(targetData));
    +
    +      //using var session = tf.Session();
    +
    +      //var loss2 = tf.constant(1.23f, TF_DataType.TF_FLOAT);
    +
    +      //var graphOptimizer = tf.train.AdamOptimizer((float)learningRate);
    +      //var minimizationOperations = graphOptimizer.minimize(loss2);
    +
    +      //var init = tf.global_variables_initializer();
    +      //session.run(init);
    +
    +      //session.run((minimizationOperations, loss2), variablesFeed);
    +
    +
    +
    +
           return newTree;
     
    -
    -      
    -
    -
    -//      //int numRows = rows.Count();
    -
    -
    -
    -
    -
    -
    -//      var variableLengths = problemData.AllowedInputVariables.ToDictionary(
    -//        var => var,
    -//        var => {
    -//          if (problemData.Dataset.VariableHasType<double>(var)) return 1;
    -//          if (problemData.Dataset.VariableHasType<DoubleVector>(var)) return problemData.Dataset.GetDoubleVectorValue(var, 0).Count;
    -//          throw new NotSupportedException($"Type of variable {var} is not supported.");
    -//        });
    -
    -//      var variablesDict = problemData.AllowedInputVariables.ToDictionary(
    -//        var => var,
    -//        var => {
    -//          if (problemData.Dataset.VariableHasType<double>(var)) {
    -//            var data = problemData.Dataset.GetDoubleValues(var, rows).Select(x => (float)x).ToArray();
    -//            return tf.convert_to_tensor(np.array(data).reshape(new Shape(numRows, 1)), DataType);
    -//          } else if (problemData.Dataset.VariableHasType<DoubleVector>(var)) {
    -//            var data = problemData.Dataset.GetDoubleVectorValues(var, rows).SelectMany(x => x.Select(y => (float)y)).ToArray();
    -//            return tf.convert_to_tensor(np.array(data).reshape(new Shape(numRows, -1)), DataType);
    -//          } else  throw new NotSupportedException($"Type of the variable is not supported: {var}");
    -//        }
    -//      );
    -
    -//      using var tape = tf.GradientTape(persistent: true);
    -
    -//      bool success = TreeToTensorConverter.TryEvaluateEager(tree,
    -//        numRows, variablesDict,
    -//        updateVariableWeights, applyLinearScaling,
    -//        out Tensor prediction,
    -//        out Dictionary<Tensor, string> parameters, out List<ResourceVariable> variables);
    -
    -//      //bool success = TreeToTensorConverter.TryConvert(tree,
    -//      //  numRows, variableLengths,
    -//      //  updateVariableWeights, applyLinearScaling,
    -//      //  out Tensor prediction,
    -//      //  out Dictionary<Tensor, string> parameters, out List<Tensor> variables);
    -
    -//      if (!success)
    -//        return (ISymbolicExpressionTree)tree.Clone();
    -
    -//      //var target = tf.placeholder(DataType, new Shape(numRows), name: problemData.TargetVariable);
    -//      var targetData = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows).Select(x => (float)x).ToArray();
    -//      var target = tf.convert_to_tensor(np.array(targetData).reshape(new Shape(numRows)), DataType);
    -//      // MSE
    -//      var cost = tf.reduce_sum(tf.square(prediction - target));
    -
    -//      tape.watch(cost);
    -
    -//      //var optimizer = tf.train.AdamOptimizer((float)learningRate);
    -//      //var optimizer = tf.train.AdamOptimizer(tf.constant(learningRate, DataType));
    -//      //var optimizer = tf.train.GradientDescentOptimizer((float)learningRate);
    -//      //var optimizer = tf.train.GradientDescentOptimizer(tf.constant(learningRate, DataType));
    -//      //var optimizer = tf.train.GradientDescentOptimizer((float)learningRate);
    -//      //var optimizer = tf.train.AdamOptimizer((float)learningRate);
    -//      //var optimizationOperation = optimizer.minimize(cost);
    -//      var optimizer = keras.optimizers.Adam((float)learningRate);
    -
    -//      #if EXPORT_GRAPH
    +
    +//#if EXPORT_GRAPH
    //      //https://github.com/SciSharp/TensorFlow.NET/wiki/Debugging
    //      tf.train.export_meta_graph(@"C:\temp\TFboard\graph.meta", as_text: false,
    //        clear_devices: true, clear_extraneous_savers: false, strip_default_attrs: true);
    //#endif
    +
    +
     
    //      //// features as feed items
    @@ -244,120 +249,4 @@
    //      //variablesFeed.Add(target, np.array(targetData));
     
    -
    -//      List<NDArray> constants;
    -//      //using (var session = tf.Session()) {
    -
    -//#if LOG_FILE
    -//        var directoryName = $"C:\\temp\\TFboard\\logdir\\manual_{DateTime.Now.ToString("yyyyMMddHHmmss")}_{maxIterations}_{learningRate.ToString(CultureInfo.InvariantCulture)}";
    -//        Directory.CreateDirectory(directoryName);
    -//        var costsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Costs.csv")));
    -//        var weightsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Weights.csv")));
    -//        var gradientsWriter = new StreamWriter(File.Create(Path.Combine(directoryName, "Gradients.csv")));
    -//#endif
    -
    -//      //session.run(tf.global_variables_initializer());
    -
    -//#if LOG_CONSOLE || LOG_FILE
    -//        var gradients = optimizer.compute_gradients(cost);
    -//#endif
    -
    -//      //var vars = variables.Select(v => session.run(v, variablesFeed)[0].ToArray<float>()[0]).ToList();
    -//      //var gradient = optimizer.compute_gradients(cost)
    -//      //  .Where(g => g.Item1 != null)
    -//      //  //.Select(g => session.run(g.Item1, variablesFeed)[0].GetValue<float>(0)).
    -//      //  .Select(g => session.run(g.Item1, variablesFeed)[0].ToArray<float>()[0])
    -//      //  .ToList();
    -
    -//      //var gradientPrediction = optimizer.compute_gradients(prediction)
    -//      //  .Where(g => g.Item1 != null)
    -//      //  .Select(g => session.run(g.Item1, variablesFeed)[0].ToArray<float>()[0])
    -//      //  .ToList();
    -
    -
    -//      //progress?.Report(session.run(cost, variablesFeed)[0].ToArray<float>()[0]);
    -//      progress?.Report(cost.ToArray<float>()[0]);
    -
    -
    -
    -
    -
    -//#if LOG_CONSOLE
    -//        Trace.WriteLine("Costs:");
    -//        Trace.WriteLine($"MSE: {session.run(cost, variablesFeed)[0].ToString(true)}");
    -
    -//        Trace.WriteLine("Weights:");
    -//        foreach (var v in variables) {
    -//          Trace.WriteLine($"{v.name}: {session.run(v).ToString(true)}");
    -//        }
    -
    -//        Trace.WriteLine("Gradients:");
    -//        foreach (var t in gradients) {
    -//          Trace.WriteLine($"{t.Item2.name}: {session.run(t.Item1, variablesFeed)[0].ToString(true)}");
    -//        }
    -//#endif
    -
    -//#if LOG_FILE
    -//        costsWriter.WriteLine("MSE");
    -//        costsWriter.WriteLine(session.run(cost, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture));
    -
    -//        weightsWriter.WriteLine(string.Join(";", variables.Select(v => v.name)));
    -//        weightsWriter.WriteLine(string.Join(";", variables.Select(v => session.run(v).ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
    -
    -//        gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => t.Item2.Name)));
    -//        gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => session.run(t.Item1, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
    -//#endif
    -
    -//        for (int i = 0; i < maxIterations; i++) {
    -//          if (cancellationToken.IsCancellationRequested)
    -//            break;
    -
    -
    -//        var gradients = tape.gradient(cost, variables);
    -//        //optimizer.apply_gradients(gradients.Zip(variables, Tuple.Create<Tensor, IVariableV1>).ToArray());
    -//        optimizer.apply_gradients(zip(gradients, variables));
    -
    -
    -//        //session.run(optimizationOperation, variablesFeed);
    -
    -//        progress?.Report(cost.ToArray<float>()[0]);
    -//        //progress?.Report(session.run(cost, variablesFeed)[0].ToArray<float>()[0]);
    -
    -//#if LOG_CONSOLE
    -//          Trace.WriteLine("Costs:");
    -//          Trace.WriteLine($"MSE: {session.run(cost, variablesFeed)[0].ToString(true)}");
    -
    -//          Trace.WriteLine("Weights:");
    -//          foreach (var v in variables) {
    -//            Trace.WriteLine($"{v.name}: {session.run(v).ToString(true)}");
    -//          }
    -
    -//          Trace.WriteLine("Gradients:");
    -//          foreach (var t in gradients) {
    -//            Trace.WriteLine($"{t.Item2.name}: {session.run(t.Item1, variablesFeed)[0].ToString(true)}");
    -//          }
    -//#endif
    -
    -//#if LOG_FILE
    -//          costsWriter.WriteLine(session.run(cost, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture));
    -//          weightsWriter.WriteLine(string.Join(";", variables.Select(v => session.run(v).ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
    -//          gradientsWriter.WriteLine(string.Join(";", gradients.Select(t => session.run(t.Item1, variablesFeed)[0].ToArray<float>()[0].ToString(CultureInfo.InvariantCulture))));
    -//#endif
    -//      }
    -
    -//#if LOG_FILE
    -//        costsWriter.Close();
    -//        weightsWriter.Close();
    -//        gradientsWriter.Close();
    -//#endif
    -//      //constants = variables.Select(v => session.run(v)).ToList();
    -//      constants = variables.Select(v => v.numpy()).ToList();
    -//      //}
    -
    -//      if (applyLinearScaling)
    -//        constants = constants.Skip(2).ToList();
    -//      var newTree = (ISymbolicExpressionTree)tree.Clone();
    -//      UpdateConstants(newTree, constants, updateVariableWeights);
    -
    -//      return newTree;
        }
     
    @@ -384,18 +273,4 @@
        }
     
    -    //private static void UpdateConstants(ISymbolicExpressionTree tree, IList<NDArray> constants, bool updateVariableWeights) {
    -    //  int i = 0;
    -    //  foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
    -    //    if (node is ConstantTreeNode constantTreeNode) {
    -    //      constantTreeNode.Value = constants[i++].ToArray<float>()[0];
    -    //    } else if (node is VariableTreeNodeBase variableTreeNodeBase && updateVariableWeights) {
    -    //      variableTreeNodeBase.Weight = constants[i++].ToArray<float>()[0];
    -    //    } else if (node is FactorVariableTreeNode factorVarTreeNode && updateVariableWeights) {
    -    //      for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
    -    //        factorVarTreeNode.Weights[j] = constants[i++].ToArray<float>()[0];
    -    //    }
    -    //  }
    -    //}
    -
        public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
          return TreeToTensorConverter.IsCompatible(tree);
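    On the `GradientTape` change above: a non-persistent tape releases its recorded operations after the first `gradient` call, and the `LOG_FILE` branch queries the same tape twice, once for the gradients of `prediction` and once for those of `loss`. Hence `persistent: true` under the logging defines and `persistent: false` otherwise. A minimal sketch under the same TensorFlow.NET assumptions (the single-variable `gradient` overload is assumed to exist in the version used):

    using System;
    using Tensorflow;
    using static Tensorflow.Binding;

    public static class PersistentTapeSketch {
      public static void Main() {
        var w = tf.Variable(3f, name: "w");
        // persistent: true keeps the tape alive so gradient() may be called twice.
        using var tape = tf.GradientTape(persistent: true);
        var prediction = w * tf.constant(2f);            // f(w) = 2w
        var loss = tf.reduce_sum(tf.square(prediction)); // L(w) = 4w^2
        var dPrediction = tape.gradient(prediction, w);  // first query: df/dw = 2
        var dLoss = tape.gradient(loss, w);              // second query: dL/dw = 8w; fails on a non-persistent tape
        Console.WriteLine($"{dPrediction.numpy()} {dLoss.numpy()}");
      }
    }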
  • branches/3040_VectorBasedGP/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Converters/TreeToTensorConverter.cs

    r18239 → r18240

    @@ -58,5 +58,5 @@
     
             inputFeatures = converter.inputFeatures;
    -        target = InputFeatureToTensor(problemData.TargetVariable, problemData, rows);
    +        target = tf.reshape(InputFeatureToTensor(problemData.TargetVariable, problemData, rows), new Shape(-1));
             variables = converter.variables;
     
    @@ -201,7 +201,7 @@
           } else if (node.Symbol is StartSymbol) {
             if (addLinearScalingTerms) {
    -          var alpha_arr = np.array(1.0f).reshape(new Shape(1, 1));
    +          var alpha_arr = np.array((float)1.0).reshape(new Shape(1, 1));
               var alpha = tf.Variable(alpha_arr, name: "alpha", dtype: DataType);
    -          var beta_arr = np.array(0.0f).reshape(new Shape(1, 1));
    +          var beta_arr = np.array((float)0.0).reshape(new Shape(1, 1));
               var beta = tf.Variable(beta_arr, name: "beta", dtype: DataType);
               variables.Add(node, new[] { beta, alpha });
    @@ -272,6 +272,5 @@
           if (node.Symbol is Division) {
             var terms = node.Subtrees.Select(EvaluateNode).ToList();
    -        //if (terms.Count == 1) return 1.0f / terms[0];
    -        if (terms.Count == 1) return 1.0 / terms[0];
    +        if (terms.Count == 1) return (float)1.0 / terms[0];
             return terms.Aggregate((a, b) => a / b);
           }
    @@ -285,5 +284,5 @@
             var x1 = EvaluateNode(node.GetSubtree(0));
             var x2 = EvaluateNode(node.GetSubtree(1));
    -        return x1 / tf.pow(1.0f + x2 * x2, 0.5f);
    +        return x1 / tf.pow((float)1.0 + x2 * x2, (float)0.5);
             //return x1 / tf.pow(1.0 + x2 * x2, 0.5);
           }
    @@ -297,5 +296,4 @@
             return tf.pow(
               (float)Math.E,
    -          //Math.E,
               EvaluateNode(node.GetSubtree(0)));
           }
    @@ -313,12 +311,10 @@
           if (node.Symbol is Cube) {
             return tf.pow(
    -          EvaluateNode(node.GetSubtree(0)), 3.0f);
    -          //ConvertNode(node.GetSubtree(0)), 3.0);
    +          EvaluateNode(node.GetSubtree(0)), (float)3.0);
           }
     
           if (node.Symbol is CubeRoot) {
             return tf.pow(
    -          EvaluateNode(node.GetSubtree(0)), 1.0f / 3.0f);
    -          //ConvertNode(node.GetSubtree(0)), 1.0 / 3.0);
    +          EvaluateNode(node.GetSubtree(0)), (float)1.0 / (float)3.0);
            // TODO
            // f: x < 0 ? -Math.Pow(-x, 1.0 / 3) : Math.Pow(x, 1.0 / 3),
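    On the `tf.reshape(..., new Shape(-1))` change to `target`: flattening to rank 1 presumably aligns the target with the prediction before the elementwise residual is taken. If the ranks differ, the subtraction does not fail but broadcasts, e.g. a (n, 1) tensor minus a (n) tensor silently yields an (n, n) matrix and a wrong loss. A small sketch of the pitfall, same TensorFlow.NET assumptions; the shapes are illustrative:

    using System;
    using Tensorflow;
    using static Tensorflow.Binding;

    public static class ReshapeSketch {
      public static void Main() {
        var prediction = tf.constant(new float[,] { { 1f }, { 2f }, { 3f } }); // shape (3, 1)
        var target = tf.constant(new float[] { 1f, 2f, 3f });                  // shape (3)
        // Mismatched ranks broadcast: (3, 1) - (3) silently becomes (3, 3).
        Console.WriteLine((prediction - target).shape);
        // Flattening both sides to rank 1 yields the intended per-row residuals.
        var residuals = tf.reshape(prediction, new Shape(-1)) - tf.reshape(target, new Shape(-1));
        Console.WriteLine(residuals.shape); // (3)
      }
    }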