TreeExtensions.FastForest Method

Definition

Overloads

FastForest(BinaryClassificationCatalog+BinaryClassificationTrainers, FastForestBinaryTrainer+Options)

Create FastForestBinaryTrainer with advanced options, which predicts a target using a decision tree regression model.

FastForest(RegressionCatalog+RegressionTrainers, FastForestRegressionTrainer+Options)

Create FastForestRegressionTrainer with advanced options, which predicts a target using a decision tree regression model.

FastForest(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, Int32, Int32, Int32)

Create FastForestBinaryTrainer, which predicts a target using a decision tree regression model.

FastForest(RegressionCatalog+RegressionTrainers, String, String, String, Int32, Int32, Int32)

Create FastForestRegressionTrainer, which predicts a target using a decision tree regression model.

FastForest(BinaryClassificationCatalog+BinaryClassificationTrainers, FastForestBinaryTrainer+Options)

Create FastForestBinaryTrainer with advanced options, which predicts a target using a decision tree regression model.

public static Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer FastForest (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer.Options options);
static member FastForest : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer.Options -> Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer
<Extension()>
Public Function FastForest (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As FastForestBinaryTrainer.Options) As FastForestBinaryTrainer

Parameters

options
FastForestBinaryTrainer.Options

Trainer options.

Returns

FastForestBinaryTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers.FastTree;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class FastForestWithOptions
    {
        // This example requires installation of additional NuGet package for 
        // Microsoft.ML.FastTree at
        // https://www.nuget.org/packages/Microsoft.ML.FastTree/
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new FastForestBinaryTrainer.Options
            {
                // Only use 80% of features to reduce over-fitting.
                FeatureFraction = 0.8,
                // Create a simpler model by penalizing usage of new features.
                FeatureFirstUsePenalty = 0.1,
                // Reduce the number of trees to 50.
                NumberOfTrees = 50
            };

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .FastForest(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: True

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.73
            //   AUC: 0.81
            //   F1 Score: 0.73
            //   Negative Precision: 0.77
            //   Negative Recall: 0.68
            //   Positive Precision: 0.69
            //   Positive Recall: 0.78
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      186 |       52 | 0.7815
            //    negative ||       77 |      185 | 0.7061
            //             ||======================
            //   Precision ||   0.7072 |   0.7806 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}
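
The options above rely on the default "Label" and "Features" column names. As a minimal sketch (an illustration, not part of the original sample), FastForestBinaryTrainer.Options also exposes LabelColumnName and FeatureColumnName, so non-default column names can be set alongside the tree options:

// Sketch only, assuming the mlContext from the example above. The column names
// "MyLabel" and "MyFeatures" are hypothetical and must match the input IDataView.
var namedOptions = new FastForestBinaryTrainer.Options
{
    LabelColumnName = "MyLabel",
    FeatureColumnName = "MyFeatures",
    NumberOfTrees = 50
};
var namedPipeline = mlContext.BinaryClassification.Trainers.FastForest(namedOptions);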


Applies to

FastForest(RegressionCatalog+RegressionTrainers, FastForestRegressionTrainer+Options)

Create FastForestRegressionTrainer with advanced options, which predicts a target using a decision tree regression model.

public static Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer FastForest (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer.Options options);
static member FastForest : Microsoft.ML.RegressionCatalog.RegressionTrainers * Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer.Options -> Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer
<Extension()>
Public Function FastForest (catalog As RegressionCatalog.RegressionTrainers, options As FastForestRegressionTrainer.Options) As FastForestRegressionTrainer

Parameters

options
FastForestRegressionTrainer.Options

Trainer options.

Returns

FastForestRegressionTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers.FastTree;

namespace Samples.Dynamic.Trainers.Regression
{
    public static class FastForestWithOptionsRegression
    {
        // This example requires installation of additional NuGet
        // package for Microsoft.ML.FastTree found at
        // https://www.nuget.org/packages/Microsoft.ML.FastTree/
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new FastForestRegressionTrainer.Options
            {
                LabelColumnName = nameof(DataPoint.Label),
                FeatureColumnName = nameof(DataPoint.Features),
                // Only use 80% of features to reduce over-fitting.
                FeatureFraction = 0.8,
                // Create a simpler model by penalizing usage of new features.
                FeatureFirstUsePenalty = 0.1,
                // Reduce the number of trees to 50.
                NumberOfTrees = 50
            };

            // Define the trainer.
            var pipeline =
                mlContext.Regression.Trainers.FastForest(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data.LoadFromEnumerable(
                GenerateRandomDataPoints(5, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(
                transformedTestData, reuseRowObject: false).ToList();

            // Look at 5 predictions for the Label, side by side with the actual
            // Label for comparison.
            foreach (var p in predictions)
                Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

            // Expected output:
            //   Label: 0.985, Prediction: 0.866
            //   Label: 0.155, Prediction: 0.171
            //   Label: 0.515, Prediction: 0.470
            //   Label: 0.566, Prediction: 0.476
            //   Label: 0.096, Prediction: 0.140

            // Evaluate the overall metrics
            var metrics = mlContext.Regression.Evaluate(transformedTestData);
            PrintMetrics(metrics);

            // Expected output:
            //   Mean Absolute Error: 0.06
            //   Mean Squared Error: 0.01
            //   Root Mean Squared Error: 0.07
            //   RSquared: 0.95 (closer to 1 is better. The worst case is 0)
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            for (int i = 0; i < count; i++)
            {
                float label = (float)random.NextDouble();
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    Features = Enumerable.Repeat(label, 50).Select(
                        x => x + (float)random.NextDouble()).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public float Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public float Label { get; set; }
            // Predicted score from the trainer.
            public float Score { get; set; }
        }

        // Print some evaluation metrics for regression problems.
        private static void PrintMetrics(RegressionMetrics metrics)
        {
            Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
            Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
            Console.WriteLine(
                "Root Mean Squared Error: " + metrics.RootMeanSquaredError);

            Console.WriteLine("RSquared: " + metrics.RSquared);
        }
    }
}
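
As a follow-up usage sketch (not part of the original sample, and assuming the mlContext, model, DataPoint, and Prediction types defined above), a single in-memory example can be scored with a prediction engine:

// Sketch only: score one DataPoint in memory with the trained model.
var engine = mlContext.Model
    .CreatePredictionEngine<DataPoint, Prediction>(model);
var sample = GenerateRandomDataPoints(1, seed: 1).First();
var result = engine.Predict(sample);
Console.WriteLine($"Label: {sample.Label:F3}, Prediction: {result.Score:F3}");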

Applies to

FastForest(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, Int32, Int32, Int32)

Create FastForestBinaryTrainer, which predicts a target using a decision tree regression model.

public static Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer FastForest (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, int numberOfLeaves = 20, int numberOfTrees = 100, int minimumExampleCountPerLeaf = 10);
static member FastForest : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * string * int * int * int -> Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer
<Extension()>
Public Function FastForest (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional numberOfLeaves As Integer = 20, Optional numberOfTrees As Integer = 100, Optional minimumExampleCountPerLeaf As Integer = 10) As FastForestBinaryTrainer

Parameters

labelColumnName
String

The name of the label column. The column data must be Boolean.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

exampleWeightColumnName
String

The name of the example weight column (optional).

numberOfLeaves
Int32

The maximum number of leaves per decision tree.

numberOfTrees
Int32

Total number of decision trees to create in the ensemble.

minimumExampleCountPerLeaf
Int32

The minimal number of data points required to form a new tree leaf.

Returns

FastForestBinaryTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class FastForest
    {
        // This example requires installation of additional NuGet package for 
        // Microsoft.ML.FastTree at
        // https://www.nuget.org/packages/Microsoft.ML.FastTree/
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .FastForest();

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.74
            //   AUC: 0.83
            //   F1 Score: 0.74
            //   Negative Precision: 0.78
            //   Negative Recall: 0.71
            //   Positive Precision: 0.71
            //   Positive Recall: 0.78
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||       34 |      204 | 0.1429
            //    negative ||       21 |      241 | 0.9198
            //             ||======================
            //   Precision ||   0.6182 |   0.5416 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}
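
The call above uses the documented defaults. As a minimal sketch (assuming the mlContext from the example above; the values shown are simply the defaults from the signature), the hyperparameters can also be passed explicitly:

// Sketch only: the same trainer with the hyperparameters spelled out.
var tunedPipeline = mlContext.BinaryClassification.Trainers.FastForest(
    labelColumnName: "Label",
    featureColumnName: "Features",
    numberOfLeaves: 20,
    numberOfTrees: 100,
    minimumExampleCountPerLeaf: 10);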


Applies to

FastForest(RegressionCatalog+RegressionTrainers, String, String, String, Int32, Int32, Int32)

Create FastForestRegressionTrainer, which predicts a target using a decision tree regression model.

public static Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer FastForest (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, int numberOfLeaves = 20, int numberOfTrees = 100, int minimumExampleCountPerLeaf = 10);
static member FastForest : Microsoft.ML.RegressionCatalog.RegressionTrainers * string * string * string * int * int * int -> Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer
<Extension()>
Public Function FastForest (catalog As RegressionCatalog.RegressionTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional numberOfLeaves As Integer = 20, Optional numberOfTrees As Integer = 100, Optional minimumExampleCountPerLeaf As Integer = 10) As FastForestRegressionTrainer

Parameters

labelColumnName
String

The name of the label column. The column data must be Single.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

exampleWeightColumnName
String

The name of the example weight column (optional).

numberOfLeaves
Int32

The maximum number of leaves per decision tree.

numberOfTrees
Int32

Total number of decision trees to create in the ensemble.

minimumExampleCountPerLeaf
Int32

The minimal number of data points required to form a new tree leaf.

Returns

FastForestRegressionTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.Regression
{
    public static class FastForestRegression
    {
        // This example requires installation of additional NuGet
        // package for Microsoft.ML.FastTree found at
        // https://www.nuget.org/packages/Microsoft.ML.FastTree/
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.Regression.Trainers.FastForest(
                labelColumnName: nameof(DataPoint.Label),
                featureColumnName: nameof(DataPoint.Features));

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data.LoadFromEnumerable(
                GenerateRandomDataPoints(5, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(
                transformedTestData, reuseRowObject: false).ToList();

            // Look at 5 predictions for the Label, side by side with the actual
            // Label for comparison.
            foreach (var p in predictions)
                Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

            // Expected output:
            //   Label: 0.985, Prediction: 0.864
            //   Label: 0.155, Prediction: 0.164
            //   Label: 0.515, Prediction: 0.470
            //   Label: 0.566, Prediction: 0.501
            //   Label: 0.096, Prediction: 0.138

            // Evaluate the overall metrics
            var metrics = mlContext.Regression.Evaluate(transformedTestData);
            PrintMetrics(metrics);

            // Expected output:
            //   Mean Absolute Error: 0.06
            //   Mean Squared Error: 0.00
            //   Root Mean Squared Error: 0.07
            //   RSquared: 0.96 (closer to 1 is better. The worst case is 0)
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            for (int i = 0; i < count; i++)
            {
                float label = (float)random.NextDouble();
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    Features = Enumerable.Repeat(label, 50).Select(
                        x => x + (float)random.NextDouble()).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public float Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public float Label { get; set; }
            // Predicted score from the trainer.
            public float Score { get; set; }
        }

        // Print some evaluation metrics for regression problems.
        private static void PrintMetrics(RegressionMetrics metrics)
        {
            Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
            Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
            Console.WriteLine(
                "Root Mean Squared Error: " + metrics.RootMeanSquaredError);

            Console.WriteLine("RSquared: " + metrics.RSquared);
        }
    }
}
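
The example above only sets the column names. As a minimal sketch (assuming the mlContext and DataPoint class from the example above; the values shown are simply the defaults from the signature), the tree hyperparameters can be passed in the same call:

// Sketch only: the same trainer with the tree hyperparameters spelled out.
var tunedPipeline = mlContext.Regression.Trainers.FastForest(
    labelColumnName: nameof(DataPoint.Label),
    featureColumnName: nameof(DataPoint.Features),
    numberOfLeaves: 20,
    numberOfTrees: 100,
    minimumExampleCountPerLeaf: 10);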

Applies to