Changeset r15449

Timestamp: 11/03/17 20:28:37
Author: gkronber
Message: #2789 created x64 build of CUBGCV algorithm, fixed some bugs
Location: branches/MathNetNumerics-Exploration-2789
Files: 1 added, 7 edited

Legend: unchanged lines are prefixed with two spaces, removed lines with "-", added lines with "+"; "…" marks omitted context between hunks.

  • branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/642.f  (r15443 → r15449)

  C-----------------------------------------------------------------------
  C
-        SUBROUTINE CUBGCV(X,F,DF,N,Y,C,IC,VAR,JOB,SE,WK,IER)
+        SUBROUTINE CUBGCV(X,F,DF,N,Y,C,IC,VAR,JOB,SE,WK,IER)
       . BIND(C, NAME='cubgcv')
         USE ISO_C_BINDING
+ !DEC$ ATTRIBUTES DLLEXPORT::CUBGCV
  C
  C---SPECIFICATIONS FOR ARGUMENTS---
-        INTEGER N,IC,JOB,IER
+        INTEGER(KIND=4) N,IC,JOB,IER
         DOUBLE PRECISION X(N),F(N),DF(N),Y(N),C(IC,3),SE(N),VAR,
        .                 WK(0:N+1,7)
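
The BIND(C, NAME='cubgcv') clause together with the added DLLEXPORT directive is what makes the routine visible as a plain C symbol named cubgcv, which the DllImport declarations in CubicSplineGCV.cs below attach to. A minimal, self-contained sketch (not part of the changeset, hypothetical helper names) that checks the export at runtime, in the same spirit as the dumpbin /EXPORTS check mentioned in that file:

    // Illustration only: verifies that 642_x86.dll / 642_x64.dll actually
    // export the lowercase symbol "cubgcv".
    using System;
    using System.Runtime.InteropServices;

    static class ExportCheck {
      [DllImport("kernel32.dll", CharSet = CharSet.Ansi, SetLastError = true)]
      static extern IntPtr LoadLibrary(string fileName);
      [DllImport("kernel32.dll", CharSet = CharSet.Ansi, SetLastError = true)]
      static extern IntPtr GetProcAddress(IntPtr module, string procName);

      static void Main() {
        string dll = Environment.Is64BitProcess ? "642_x64.dll" : "642_x86.dll";
        IntPtr lib = LoadLibrary(dll);
        IntPtr sym = lib == IntPtr.Zero ? IntPtr.Zero : GetProcAddress(lib, "cubgcv");
        Console.WriteLine(sym != IntPtr.Zero
          ? dll + " exports cubgcv"
          : dll + " could not be loaded or does not export cubgcv");
      }
    }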

  • branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/CubicSplineGCV.cs  (r15443 → r15449)

  using System;
  using System.Collections.Generic;
+ using System.Diagnostics;
  using System.Linq;
  using System.Runtime.InteropServices;
  using System.Text;
  using System.Threading.Tasks;
+ using HeuristicLab.Problems.DataAnalysis;

  namespace HeuristicLab.Algorithms.DataAnalysis.Experimental {
  …
      //                           IER = 133, JOB IS NOT 0 OR 1.

-     // TODO: Build x64 version of the library using mingw
-     // TODO: statically link all required libraries (libgcc_s_dw2-1.dll is still required)
-     // To build the fortran library use:
-     // > gfortran -static -Lc:/mingw/bin/libgcc_s_dw2-1.dll -m32 642.f -shared -o 642_x86.dll
-     // this means that libgcc_s_dw2-1 is statically linked, i386 code is produced and a shared library is produced
+     // To build the fortran library (x64) use:
+     // > gfortran -m64 642.f -shared -o 642_x64.dll  or
+     // > ifort /dll /Qm64 642.f /Fe642_x64.dll
+     // check dumpbin /EXPORTS 642_x64.dll
+     // and   dumpbin /IMPORTS 642_x64.dll
      [DllImport("642_x64.dll", CallingConvention = CallingConvention.Cdecl, EntryPoint = "cubgcv")]
      public static extern void cubgcv_x64(
-       double[] x, double[] f, double[] df, ref int n, double[] y, double[,] c, ref int ic, ref double var, ref int job, double[] se, double[,] wk);
-
-     // CUBGCV(X, F, DF, N, Y, C, IC, VAR, JOB, SE, WK, IER)
+       double[] x,
+       double[] f,
+       double[] df,
+       ref int n,
+       [Out] double[] y,
+       [Out] double[,] c,
+       ref int ic,
+       ref double var,
+       ref int job,
+       [Out] double[] se,
+       double[,] wk,
+       ref int ier);
+
+     // To build the fortran library (x86) use:
+     // > gfortran -static -Lc:/mingw/bin/libgcc_s_dw2-1.dll -m32 642.f -shared -o 642_x86.dll
      [DllImport("642_x86.dll", CallingConvention = CallingConvention.Cdecl, EntryPoint = "cubgcv")]
      public static extern void cubgcv_x86(
-       double[] x,
-       double[] f,
-       double[] df,
-       ref int n,
-       double[] y,
-       double[,] c,
-       ref int ic,
-       ref double var,
-       ref int job,
-       double[] se,
+       double[] x,
+       double[] f,
+       double[] df,
+       ref Int32 n,
+       double[] y,
+       double[,] c,
+       ref Int32 ic,
+       ref double var,
+       ref Int32 job,
+       double[] se,
        double[,] wk,
-       ref int ier);
+       ref Int32 ier);
+
+     public class CubGcvReport {
+       //
+       //   IF WK(1)=0 (RHO=0) AN INTERPOLATING NATURAL CUBIC
+       //   SPLINE HAS BEEN CALCULATED.
+       //   IF WK(1)=1 (RHO=INFINITE) A LEAST SQUARES
+       //   REGRESSION LINE HAS BEEN CALCULATED.
+       //
+       public double smoothingParameter;
+       //   WK(2) IS AN ESTIMATE OF THE NUMBER OF DEGREES OF
+       //   FREEDOM OF THE RESIDUAL WHICH REDUCES TO THE
+       //   USUAL VALUE OF N-2 WHEN A LEAST SQUARES REGRESSION
+       //   LINE IS CALCULATED.
+       public double estimatedRSSDegreesOfFreedom;
+       //   WK(3),WK(4),WK(5) ARE CALCULATED WITH THE DF(I)
+       //   SCALED TO HAVE MEAN SQUARE VALUE 1.  THE
+       //   UNSCALED VALUES OF WK(3),WK(4),WK(5) MAY BE
+       //   CALCULATED BY DIVIDING BY WK(7).
+       public double generalizedCrossValidation;
+       public double meanSquareResiudal;
+       public double estimatedTrueMeanSquaredErrorAtDataPoints;
+
+       //   WK(6) COINCIDES WITH THE OUTPUT VALUE OF VAR IF
+       //   VAR IS NEGATIVE ON INPUT.  IT IS CALCULATED WITH
+       //   THE UNSCALED VALUES OF THE DF(I) TO FACILITATE
+       //   COMPARISONS WITH A PRIORI VARIANCE ESTIMATES.
+       public double estimatedErrorVariance;
+       public double meanSquareOfDf;
+       public double[] se;
+     }
+
+     public static ReinschSmoothingSplineModel CalculateCubicSpline(double[] x, double[] y,
+       string targetVariable, string[] inputVars,
+       out CubGcvReport report) {
+       var w = Enumerable.Repeat(1.0, x.Length).ToArray();
+       double[] x2, y2, w2;
+       Splines.SortAndBin(x, y, w, out x2, out y2, out w2);
+       // TODO: use weights correctly
+
+       int n = x2.Length;
+       double[] df = Enumerable.Repeat(1.0, n).ToArray();   // set each df if actual standard deviation of data points is not known
+       double[,] c = new double[3, n - 1];
+       double var = -99.0;
+       double[] se = new double[n]; // standard errors in points
+       double[,] wk = new double[7, (n + 2)]; // work array
+       double[] y_smoothed = new double[n];
+       int job = 1; // calc estimates of standard errors in points
+       int ic = n - 1;
+       int ier = -99;
+       if (Environment.Is64BitProcess) {
+         CubicSplineGCV.cubgcv_x64(x2, y2, df, ref n, y_smoothed,
+           c, ref ic, ref var, ref job, se, wk, ref ier);
+       } else {
+         CubicSplineGCV.cubgcv_x86(x2, y2, df, ref n, y_smoothed,
+           c, ref ic, ref var, ref job, se, wk, ref ier);
+       }
+
+       /*
+        *                  IER    - ERROR PARAMETER. (OUTPUT)
+        *                           TERMINAL ERROR
+        *                             IER = 129, IC IS LESS THAN N-1.
+        *                             IER = 130, N IS LESS THAN 3.
+        *                             IER = 131, INPUT ABSCISSAE ARE NOT
+        *                               ORDERED SO THAT X(I).LT.X(I+1).
+        *                             IER = 132, DF(I) IS NOT POSITIVE FOR SOME I.
+        *                             IER = 133, JOB IS NOT 0 OR 1.
+        */
+
+       if (ier == 131) {
+         throw new ArgumentException("x is not ordered");
+       } else if (ier != 0) throw new ArgumentException("Error in CUBGCV " + ier);
+
+       //                           OF THE SPLINE APPROXIMATION AT T IS
+       //                           S(T)=((C(I,3)*D+C(I,2))*D+C(I,1))*D+Y(I)
+       //                           WHERE X(I).LE.T.LT.X(I+1) AND
+       //                           D = T-X(I).
+
+       //                           WK(1) = SMOOTHING PARAMETER (= RHO/(RHO + 1))
+       //                           WK(2) = ESTIMATE OF THE NUMBER OF DEGREES OF
+       //                                   FREEDOM OF THE RESIDUAL SUM OF SQUARES
+       //                           WK(3) = GENERALIZED CROSS VALIDATION
+       //                           WK(4) = MEAN SQUARE RESIDUAL
+       //                           WK(5) = ESTIMATE OF THE TRUE MEAN SQUARE ERROR
+       //                                   AT THE DATA POINTS
+       //                           WK(6) = ESTIMATE OF THE ERROR VARIANCE
+       //                           WK(7) = MEAN SQUARE VALUE OF THE DF(I)
+       report = new CubGcvReport();
+       report.smoothingParameter = wk[0, 0];
+       report.estimatedRSSDegreesOfFreedom = wk[0, 1];
+       report.generalizedCrossValidation = wk[0, 2];
+       report.meanSquareResiudal = wk[0, 3];
+       report.estimatedTrueMeanSquaredErrorAtDataPoints = wk[0, 4];
+       report.estimatedErrorVariance = wk[0, 5];
+       report.meanSquareOfDf = wk[0, 6];
+       report.se = se;
+       Debug.Assert(var == report.estimatedErrorVariance);
+
+       double[] dArr = new double[n];
+       double[] cArr = new double[n];
+       double[] bArr = new double[n];
+
+       for (int i = 0; i < n - 1; i++) {
+         bArr[i] = c[0, i];
+         cArr[i] = c[1, i];
+         dArr[i] = c[2, i];
+       }
+
+       // extrapolate linearly for xx > x[n]
+       bArr[n - 1] = bArr[n - 2];
+       // extrapolate linearly for xx < x[1]
+       dArr[0] = 0;
+
+       return new ReinschSmoothingSplineModel(
+         new MyArray<double>(1, y_smoothed),
+         new MyArray<double>(1, bArr),
+         new MyArray<double>(1, cArr),
+         new MyArray<double>(1, dArr),
+         new MyArray<double>(1, x2),
+         targetVariable, inputVars);
+     }
+
    }
-
  }
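
A note on the array shapes in CalculateCubicSpline (an observation about the code above, not part of the changeset): the C# arrays are declared with their dimensions swapped relative to the Fortran declarations, double[3, n - 1] for C(IC,3) and double[7, n + 2] for WK(0:N+1,7), because .NET rectangular arrays are marshaled as a row-major block while Fortran stores arrays column-major; with the dimensions transposed the flat layouts coincide, so c[j, i] on the C# side addresses the same slot as C(i+1, j+1) on the Fortran side. A small self-contained sketch of that layout equivalence (toy sizes, illustrative only):

    // A row-major double[3, 4] occupies the same flat layout as a
    // column-major Fortran array C(4,3).
    using System;

    static class LayoutDemo {
      static void Main() {
        int ic = 4;                          // stands in for IC = n - 1
        double[,] c = new double[3, ic];     // C# declaration used for Fortran C(IC,3)
        c[1, 2] = 42.0;                      // C# [j, i] ...
        double[] flat = new double[c.Length];
        Buffer.BlockCopy(c, 0, flat, 0, sizeof(double) * c.Length);
        // ... lands at flat offset j*ic + i, the element Fortran addresses as C(i+1, j+1)
        Console.WriteLine(flat[1 * ic + 2]); // prints 42
      }
    }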

  • branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/GAM.cs  (r15442 → r15449)

        var avgY = y.Average();
        var inputVars = Problem.ProblemData.AllowedInputVariables.ToArray();
-       var nTerms = inputVars.Length; // LR
+       var nTerms = 0; // inputVars.Length; // LR
        for (int i = 1; i <= maxInteractions; i++) {
          nTerms += inputVars.Combinations(i).Count();
  …
        while (iters++ < maxIters) {
          int j = 0;
-         foreach (var inputVar in inputVars) {
-           var res = CalculateResiduals(problemData, f, j, avgY, problemData.TrainingIndices);
-           rss[j] = res.Variance();
-           terms[j] = inputVar;
-           f[j] = RegressLR(problemData, inputVar, res);
-           j++;
-         }
+         //foreach (var inputVar in inputVars) {
+         //  var res = CalculateResiduals(problemData, f, j, avgY, problemData.TrainingIndices);
+         //  rss[j] = res.Variance();
+         //  terms[j] = inputVar;
+         //  f[j] = RegressLR(problemData, inputVar, res);
+         //  j++;
+         //}

          for (int interaction = 1; interaction <= maxInteractions; interaction++) {
  …
      }

-     private IRegressionModel RegressSpline(IRegressionProblemData problemData, string inputVar, double[] target, double lambda) {
-       if (problemData.Dataset.VariableHasType<double>(inputVar)) {
-         // Cumbersome!
-         return Splines.CalculatePenalizedRegressionSpline(
-           problemData.Dataset.GetDoubleValues(inputVar, problemData.TrainingIndices).ToArray(),
-           (double[])target.Clone(), lambda,
-           problemData.TargetVariable, new string[] { inputVar }
-           );
-       } else return new ConstantModel(target.Average(), problemData.TargetVariable);
-     }
+     // private IRegressionModel RegressSpline(IRegressionProblemData problemData, string inputVar, double[] target, double lambda) {
+     //   if (problemData.Dataset.VariableHasType<double>(inputVar)) {
+     //     // Cumbersome!
+     //     return Splines.CalculatePenalizedRegressionSpline(
+     //       problemData.Dataset.GetDoubleValues(inputVar, problemData.TrainingIndices).ToArray(),
+     //       (double[])target.Clone(), lambda,
+     //       problemData.TargetVariable, new string[] { inputVar }
+     //       );
+     //   } else return new ConstantModel(target.Average(), problemData.TargetVariable);
+     // }
      private IRegressionModel RegressSpline(IRegressionProblemData problemData, string[] inputVars, double[] target, double lambda) {
        if (inputVars.All(problemData.Dataset.VariableHasType<double>)) {
  …
            product = product.Zip(problemData.Dataset.GetDoubleValues(inputVars[i], problemData.TrainingIndices), (pi, vi) => pi * vi).ToArray();
          }
-         double optTolerance, looRMSE;
-         return Splines.CalculateSmoothingSplineReinsch(
+         CubicSplineGCV.CubGcvReport report;
+         return CubicSplineGCV.CalculateCubicSpline(
            product,
-           (double[])target.Clone(), inputVars,
-           problemData.TargetVariable,
-           out optTolerance, out looRMSE
+           (double[])target.Clone(),
+           problemData.TargetVariable, inputVars, out report
            );
        } else return new ConstantModel(target.Average(), problemData.TargetVariable);
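
For context, the loop being disabled above (and the interaction loop that remains) follows the usual backfitting scheme for additive models: each term is refit against the partial residual obtained by subtracting the intercept and all other terms, and the sweep is repeated until the maximum iteration count. A self-contained toy illustration of that loop structure, using simple least-squares lines instead of HeuristicLab's spline models (all names and data below are hypothetical, not taken from the changeset):

    // Toy backfitting for an additive model y ≈ avg(y) + f1(x1) + f2(x2).
    using System;
    using System.Linq;

    static class BackfittingDemo {
      // fit y ≈ a*x (no intercept) by least squares
      static Func<double, double> FitLine(double[] x, double[] y) {
        double a = x.Zip(y, (xi, yi) => xi * yi).Sum() / x.Sum(xi => xi * xi);
        return xi => a * xi;
      }

      static void Main() {
        var rnd = new Random(1234);
        int n = 200;
        var x1 = Enumerable.Range(0, n).Select(_ => rnd.NextDouble() * 2 - 1).ToArray();
        var x2 = Enumerable.Range(0, n).Select(_ => rnd.NextDouble() * 2 - 1).ToArray();
        var y = x1.Zip(x2, (a, b) => 3 * a - 2 * b).ToArray();

        double avgY = y.Average();
        Func<double, double> f1 = _ => 0.0, f2 = _ => 0.0;

        for (int iter = 0; iter < 10; iter++) {
          // partial residual for term 1: subtract the intercept and all other terms
          var r1 = Enumerable.Range(0, n).Select(i => y[i] - avgY - f2(x2[i])).ToArray();
          f1 = FitLine(x1, r1);
          // partial residual for term 2
          var r2 = Enumerable.Range(0, n).Select(i => y[i] - avgY - f1(x1[i])).ToArray();
          f2 = FitLine(x2, r2);
        }
        // slopes converge towards the generating coefficients 3 and -2
        Console.WriteLine("f1 slope ~ {0:F2}, f2 slope ~ {1:F2}", f1(1.0), f2(1.0));
      }
    }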

  • branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/HeuristicLab.Algorithms.DataAnalysis.Experimental.csproj  (r15443 → r15449)

    </ItemGroup>
    <ItemGroup>
+     <Content Include="642_x64.dll">
+       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+     </Content>
      <Content Include="642_x86.dll">
        <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>

  • branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/Splines.cs  (r15442 → r15449)

            "Smoothing Spline (Reinsch with automatic tolerance determination using LOOCV)",
            "B-Spline Smoothing",
-           "Penalized Regression Spline (alglib)"
+           "Penalized Regression Spline (alglib)",
+           "CUBGCV"
        }.Select(s => new StringValue(s)));

-       Parameters.Add(new ConstrainedValueParameter<StringValue>("Type", "The type of spline (as supported by alglib)", validTypes, validTypes.First()));
+       Parameters.Add(new ConstrainedValueParameter<StringValue>("Type", "The type of spline (as supported by alglib)", validTypes, validTypes.Last()));
        Parameters.Add(new ValueParameter<DoubleValue>("Lambda", "Regularization parameter for smoothing splines (0..+inf)", new DoubleValue(100)));
        Parameters.Add(new ValueParameter<DoubleValue>("StdDev (noise)", "Known error in y values. Used only by Reinsch Smoothing Splines", new DoubleValue(0.1)));
  …
                Results.Add(new Result("Optimal tolerance", new DoubleValue(optTol)));
                Results.Add(new Result("RMSE (LOO-CV)", new DoubleValue(looRMSE)));
+               break;
+             }
+           case "CUBGCV": {
+               CubicSplineGCV.CubGcvReport report;
+               var model =
+                 CubicSplineGCV.CalculateCubicSpline(x, y,
+                 Problem.ProblemData.TargetVariable, inputVars, out report);
+               var targetVar = Problem.ProblemData.TargetVariable;
+               var problemData = (IRegressionProblemData)Problem.ProblemData.Clone();
+               Results.Add(new Result("Solution", model.CreateRegressionSolution(problemData)));
+               Results.Add(new Result("GCV", new DoubleValue(report.generalizedCrossValidation)));
+               Results.Add(new Result("Estimated error variance", new DoubleValue(report.estimatedErrorVariance)));
+               Results.Add(new Result("Estimated RSS degrees of freedom", new DoubleValue(report.estimatedRSSDegreesOfFreedom)));
+               Results.Add(new Result("Estimated true mean squared error at data points", new DoubleValue(report.estimatedTrueMeanSquaredErrorAtDataPoints)));
+               Results.Add(new Result("Mean square of DF", new DoubleValue(report.meanSquareOfDf)));
+               Results.Add(new Result("Mean square residual", new DoubleValue(report.meanSquareResiudal)));
+               Results.Add(new Result("Smoothing parameter (optimized for GCV)", new DoubleValue(report.smoothingParameter)));
                break;
              }
  …
        }

+       // extrapolate for xx > x[n2]
+       b[b.Length] = b[b.Length - 1];
+       d[1] = 0;
+
        return new ReinschSmoothingSplineModel(a, b, c, d, x, targetVar, inputVars);
      }
  …
      }

-     private static void SortAndBin(double[] x, double[] y, double[] w, out double[] x2, out double[] y2, out double[] w2, bool scaling = false) {
+     public static void SortAndBin(double[] x, double[] y, double[] w, out double[] x2, out double[] y2, out double[] w2, bool scaling = false) {
        var sortedIdx = Enumerable.Range(0, x.Length).ToArray();
        // sort by x
  …
        this.scale = scale;
        this.offset = offset;
-
-       // extrapolate for xx > x[n2]
-       b[b.Length] = b[b.Length - 1];
-       d[1] = 0;
-       // d[b.Length] = -d[d.Length - 1];
      }
  …

      public double GetEstimatedValue(double xx) {
-       int n = x.Length;
+       int n = a.Length;
        if (xx <= x[1]) {
          double h = xx - x[1];
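
The per-segment evaluation that these coefficient arrays feed into is documented in CubicSplineGCV.cs as S(T) = ((C(I,3)*D + C(I,2))*D + C(I,1))*D + Y(I) with D = T - X(I), i.e. a cubic in Horner form on the segment containing T; the extrapolation tweaks above (copying the last slope, zeroing boundary coefficients) keep the estimate close to linear outside the knot range. A minimal self-contained sketch of that evaluation, with 0-based hypothetical arrays rather than HeuristicLab's 1-based MyArray:

    // Illustration only: evaluate a cubic spline segment in Horner form.
    // y, b, c, d hold the per-segment coefficients (y = value at the left knot),
    // x holds the sorted knots; queries outside [x[0], x[n-1]] reuse the boundary segment.
    using System;

    static class SplineEvalDemo {
      static double Evaluate(double[] x, double[] y, double[] b, double[] c, double[] d, double t) {
        int n = x.Length;
        int i = Array.BinarySearch(x, t);
        if (i < 0) i = ~i - 1;                    // index of the knot at or below t
        i = Math.Max(0, Math.Min(n - 2, i));      // clamp to a valid segment
        double dt = t - x[i];
        return ((d[i] * dt + c[i]) * dt + b[i]) * dt + y[i];  // S(t) = ((d*D + c)*D + b)*D + y
      }

      static void Main() {
        // made-up coefficients, just to exercise the evaluator
        double[] x = { 0.0, 1.0, 2.0 };
        double[] y = { 0.0, 1.0, 2.5 };
        double[] b = { 1.0, 1.5, 1.5 };
        double[] c = { 0.0, 0.5, 0.0 };
        double[] d = { 0.5, -0.5, 0.0 };
        Console.WriteLine(Evaluate(x, y, b, c, d, 0.5));
        Console.WriteLine(Evaluate(x, y, b, c, d, 2.5));  // beyond the last knot: boundary segment
      }
    }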

  • branches/MathNetNumerics-Exploration-2789/Main/Main.csproj  (r15443 → r15449)

      <ErrorReport>prompt</ErrorReport>
      <WarningLevel>4</WarningLevel>
+     <Prefer32Bit>false</Prefer32Bit>
    </PropertyGroup>
    <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
  …
        <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
      </Reference>
+     <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec" />
      <Reference Include="HeuristicLab.Problems.DataAnalysis-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
        <SpecificVersion>False</SpecificVersion>
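
The Prefer32Bit change matters for the native interop path: with an AnyCPU build and Prefer32Bit disabled, the console program runs as a 64-bit process on 64-bit Windows, so the Environment.Is64BitProcess branch in CalculateCubicSpline resolves to the new 642_x64.dll instead of the x86 build. A trivial sanity check (illustrative only, not part of the changeset):

    using System;

    static class BitnessCheck {
      static void Main() {
        // Prints which native library the P/Invoke wrapper would bind to in this process.
        Console.WriteLine(Environment.Is64BitProcess
          ? "64-bit process: will use cubgcv_x64 (642_x64.dll)"
          : "32-bit process: will use cubgcv_x86 (642_x86.dll)");
      }
    }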

  • branches/MathNetNumerics-Exploration-2789/Main/Program.cs  (r15443 → r15449)

      static void Main(string[] args) {
        var xs = HeuristicLab.Common.SequenceGenerator.GenerateSteps(-3.5, 3.5, 0.1, includeEnd: true).ToList();
-       var ys = xs.Select(xi => 1.0 / Math.Sqrt(2 *Math.PI) * Math.Exp(-0.5 * xi*xi)).ToArray(); // 1.0 / (Math.Sqrt(2 * Math.PI) * Math.Exp(-0.5 * xi * xi))).ToArray();
+       var ys = xs.Select(xi => 1.0 / Math.Sqrt(2 * Math.PI) * Math.Exp(-0.5 * xi * xi)).ToArray(); // 1.0 / (Math.Sqrt(2 * Math.PI) * Math.Exp(-0.5 * xi * xi))).ToArray();

+       int n = xs.Count();
        alglib.hqrndstate state;
        alglib.hqrndseed(1234, 5678, out state);
        var ys_noise = ys.Select(yi => yi + alglib.hqrndnormal(state) * 0.1).ToList();

-       int n = xs.Count;
-       double[] ys_smoothed = new double[n];
-       double[] df = Enumerable.Repeat(1.0, n).ToArray();   // set each df if actual standard deviation of data points is not known
-       double[,] c = new double[n - 1, 3];
-       double var = -99.0;
-       int ic = n - 1;
-       int job = 1; // calc estimates of standard errors in points
-       double[] se = new double[n]; // standard errors in points
-       double[,] wk = new double[7, (n + 2)]; // work array;
-       int ier = -99;
-       if (Environment.Is64BitProcess) {
-         // CubicSplineGCV.cubgcv_x64(xs.ToArray(), f, df, ref n, ys_noise.ToArray(), c, ref ic, ref var, ref job, se, wk);
-         Console.WriteLine("x64 version not supported");
-       } else {
-         CubicSplineGCV.cubgcv_x86(xs.ToArray(), ys_noise.ToArray(), df, ref n, ys_smoothed,
-           c, ref ic, ref var, ref job, se, wk, ref ier);
+       CubicSplineGCV.CubGcvReport report;
+       var model = CubicSplineGCV.CalculateCubicSpline(
+         xs.ToArray(), ys_noise.ToArray(), "y", new string[] { "x" }, out report);

+       Console.WriteLine("Smoothing Parameter (= RHO/(RHO + 1) {0}", report.smoothingParameter);
+       Console.WriteLine("Estimated DOF of RSS {0}", report.estimatedRSSDegreesOfFreedom);
+       Console.WriteLine("GCV {0}", report.generalizedCrossValidation);
+       Console.WriteLine("Mean squared residual {0}", report.meanSquareResiudal);
+       Console.WriteLine("Estimate of true MSE at data points {0}", report.estimatedTrueMeanSquaredErrorAtDataPoints);
+       Console.WriteLine("Estimate of error variance {0}", report.estimatedErrorVariance);
+       Console.WriteLine("Mean square value of DF(I) {0}", report.meanSquareOfDf);

-         //                           WK(1) = SMOOTHING PARAMETER (= RHO/(RHO + 1))
-         //                           WK(2) = ESTIMATE OF THE NUMBER OF DEGREES OF
-         //                                   FREEDOM OF THE RESIDUAL SUM OF SQUARES
-         //                           WK(3) = GENERALIZED CROSS VALIDATION
-         //                           WK(4) = MEAN SQUARE RESIDUAL
-         //                           WK(5) = ESTIMATE OF THE TRUE MEAN SQUARE ERROR
-         //                                   AT THE DATA POINTS
-         //                           WK(6) = ESTIMATE OF THE ERROR VARIANCE
-         //                           WK(7) = MEAN SQUARE VALUE OF THE DF(I)
+       OnlineCalculatorError error;
+       var ys_smoothed = xs.Select(xi => model.GetEstimatedValue(xi)).ToArray();
+       var mse = OnlineMeanSquaredErrorCalculator.Calculate(ys, ys_smoothed, out error);
+       Console.WriteLine("MSE(ys, ys_smooth) = {0}", mse);
+       mse = OnlineMeanSquaredErrorCalculator.Calculate(ys, ys_noise, out error);
+       Console.WriteLine("MSE(ys, ys_noise) = {0}", mse);

-         Console.WriteLine("Smoothing Parameter (= RHO/(RHO + 1) {0}", wk[0, 0]);
-         Console.WriteLine("Estimated DOF of RSS {0}", wk[0, 1]);
-         Console.WriteLine("GCV {0}", wk[0, 2]);
-         Console.WriteLine("Mean squared residual {0}", wk[0, 3]);
-         Console.WriteLine("Estimate of true MSE at data points {0}", wk[0, 4]);
-         Console.WriteLine("Estimate of error variance {0}", wk[0, 5]);
-         Console.WriteLine("Mean square value of DF(I) {0}", wk[0, 6]);
+       Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;

-         OnlineCalculatorError error;
-         var mse = OnlineMeanSquaredErrorCalculator.Calculate(ys, ys_smoothed, out error);
-         Console.WriteLine("MSE(ys, ys_smooth) = {0}", mse);
-         mse = OnlineMeanSquaredErrorCalculator.Calculate(ys, ys_noise, out error);
-         Console.WriteLine("MSE(ys, ys_noise) = {0}", mse);
-
-         Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
-
-         for (int i=0;i<n;i++) {
-           Console.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5}",
-             xs[i], ys[i], ys_smoothed[i], ys_noise[i], ys_smoothed[i] + 1.96 * se[i], ys_smoothed[i] - 1.96 * se[i]);
-         }
+       for (int i = 0; i < n; i++) {
+         Console.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5}",
+           xs[i], ys[i], ys_smoothed[i], ys_noise[i], ys_smoothed[i] + 1.96 * report.se[i], ys_smoothed[i] - 1.96 * report.se[i]);
        }
      }