StandardTrainersCatalog.SdcaNonCalibrated Method

Definition

Overloads

SdcaNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SdcaNonCalibratedBinaryTrainer+Options)

Create SdcaNonCalibratedBinaryTrainer with advanced options, which predicts a target using a linear classification model trained over boolean label data.

SdcaNonCalibrated(MulticlassClassificationCatalog+MulticlassClassificationTrainers, SdcaNonCalibratedMulticlassTrainer+Options)

Create SdcaNonCalibratedMulticlassTrainer with advanced options, which predicts a target using a linear multiclass classification model trained with a coordinate descent method.

SdcaNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, ISupportSdcaClassificationLoss, Nullable<Single>, Nullable<Single>, Nullable<Int32>)

Create SdcaNonCalibratedBinaryTrainer, which predicts a target using a linear classification model.

SdcaNonCalibrated(MulticlassClassificationCatalog+MulticlassClassificationTrainers, String, String, String, ISupportSdcaClassificationLoss, Nullable<Single>, Nullable<Single>, Nullable<Int32>)

Create SdcaNonCalibratedMulticlassTrainer, which predicts a target using a linear multiclass classification model trained with a coordinate descent method.

SdcaNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SdcaNonCalibratedBinaryTrainer+Options)

Create SdcaNonCalibratedBinaryTrainer with advanced options, which predicts a target using a linear classification model trained over boolean label data.

public static Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer SdcaNonCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer.Options options);
static member SdcaNonCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer.Options -> Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer
<Extension()>
Public Function SdcaNonCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As SdcaNonCalibratedBinaryTrainer.Options) As SdcaNonCalibratedBinaryTrainer

Parameters

catalog
BinaryClassificationCatalog.BinaryClassificationTrainers

The binary classification catalog trainer object.

options
SdcaNonCalibratedBinaryTrainer.Options

Trainer options.

Returns

SdcaNonCalibratedBinaryTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class SdcaNonCalibratedWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // ML.NET doesn't cache data set by default. Therefore, if one reads a
            // data set from a file and accesses it many times, it can be slow due
            // to expensive featurization and disk operations. When the considered
            // data can fit into memory, a solution is to cache the data in memory.
            // Caching is especially helpful when working with iterative algorithms 
            // which need many data passes.
            trainingData = mlContext.Data.Cache(trainingData);

            // Define trainer options.
            var options = new SdcaNonCalibratedBinaryTrainer.Options()
            {
                // Specify loss function.
                LossFunction = new HingeLoss(),
                // Make the convergence tolerance tighter.
                ConvergenceTolerance = 0.05f,
                // Increase the maximum number of passes over training data.
                MaximumNumberOfIterations = 30,
                // Give the instances of the positive class slightly more weight.
                PositiveInstanceWeight = 1.2f,
            };

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .SdcaNonCalibrated(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: False
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: True

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.61
            //   AUC: 0.67
            //   F1 Score: 0.65
            //   Negative Precision: 0.69
            //   Negative Recall: 0.45
            //   Positive Precision: 0.56
            //   Positive Recall: 0.77
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      178 |       60 | 0.7479
            //    negative ||      134 |      128 | 0.4885
            //             ||======================
            //   Precision ||   0.5705 |   0.6809 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to

SdcaNonCalibrated(MulticlassClassificationCatalog+MulticlassClassificationTrainers, SdcaNonCalibratedMulticlassTrainer+Options)

Create SdcaNonCalibratedMulticlassTrainer with advanced options, which predicts a target using a linear multiclass classification model trained with a coordinate descent method.

public static Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer SdcaNonCalibrated (this Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer.Options options);
static member SdcaNonCalibrated : Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers * Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer.Options -> Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer
<Extension()>
Public Function SdcaNonCalibrated (catalog As MulticlassClassificationCatalog.MulticlassClassificationTrainers, options As SdcaNonCalibratedMulticlassTrainer.Options) As SdcaNonCalibratedMulticlassTrainer

Parameters

catalog
MulticlassClassificationCatalog.MulticlassClassificationTrainers

The multiclass classification catalog trainer object.

options
SdcaNonCalibratedMulticlassTrainer.Options

Trainer options.

Returns

SdcaNonCalibratedMulticlassTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.MulticlassClassification
{
    public static class SdcaNonCalibratedWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // ML.NET doesn't cache data set by default. Therefore, if one reads a
            // data set from a file and accesses it many times, it can be slow due
            // to expensive featurization and disk operations. When the considered
            // data can fit into memory, a solution is to cache the data in memory.
            // Caching is especially helpful when working with iterative algorithms 
            // which need many data passes.
            trainingData = mlContext.Data.Cache(trainingData);

            // Define trainer options.
            var options = new SdcaNonCalibratedMulticlassTrainer.Options
            {
                Loss = new HingeLoss(),
                L1Regularization = 0.1f,
                BiasLearningRate = 0.01f,
                NumberOfThreads = 1
            };

            // Define the trainer.
            var pipeline =
                // Convert the string labels into key types.
                mlContext.Transforms.Conversion.MapValueToKey("Label")
                // Apply SdcaNonCalibrated multiclass trainer.
                .Append(mlContext.MulticlassClassification.Trainers
                .SdcaNonCalibrated(options));

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Look at 5 predictions
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, " +
                    $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: 1, Prediction: 1
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 2
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 3

            // Evaluate the overall metrics
            var metrics = mlContext.MulticlassClassification
                .Evaluate(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Micro Accuracy: 0.91
            //   Macro Accuracy: 0.91
            //   Log Loss: 0.22
            //   Log Loss Reduction: 0.80

            //   Confusion table
            //             ||========================
            //   PREDICTED ||     0 |     1 |     2 | Recall
            //   TRUTH     ||========================
            //           0 ||   145 |     0 |    15 | 0.9063
            //           1 ||     0 |   164 |    13 | 0.9266
            //           2 ||    12 |     7 |   144 | 0.8834
            //             ||========================
            //   Precision ||0.9236 |0.9591 |0.8372 |
        }

        // Generates random uniform doubles in [-0.5, 0.5)
        // range with labels 1, 2 or 3.
        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)(random.NextDouble() - 0.5);
            for (int i = 0; i < count; i++)
            {
                // Generate Labels that are integers 1, 2 or 3
                var label = random.Next(1, 4);
                yield return new DataPoint
                {
                    Label = (uint)label,
                    // Create random features that are correlated with the label.
                    // The feature values are slightly increased by adding a
                    // constant multiple of label.
                    Features = Enumerable.Repeat(label, 20)
                        .Select(x => randomFloat() + label * 0.2f).ToArray()

                };
            }
        }

        // Example with label and 20 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public uint Label { get; set; }
            [VectorType(20)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public uint Label { get; set; }
            // Predicted label from the trainer.
            public uint PredictedLabel { get; set; }
        }

        // Pretty-print MulticlassClassificationMetrics objects.
        public static void PrintMetrics(MulticlassClassificationMetrics metrics)
        {
            Console.WriteLine($"Micro Accuracy: {metrics.MicroAccuracy:F2}");
            Console.WriteLine($"Macro Accuracy: {metrics.MacroAccuracy:F2}");
            Console.WriteLine($"Log Loss: {metrics.LogLoss:F2}");
            Console.WriteLine(
                $"Log Loss Reduction: {metrics.LogLossReduction:F2}\n");

            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to

SdcaNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, ISupportSdcaClassificationLoss, Nullable<Single>, Nullable<Single>, Nullable<Int32>)

Create SdcaNonCalibratedBinaryTrainer, which predicts a target using a linear classification model.

public static Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer SdcaNonCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, Microsoft.ML.Trainers.ISupportSdcaClassificationLoss lossFunction = default, float? l2Regularization = default, float? l1Regularization = default, int? maximumNumberOfIterations = default);
static member SdcaNonCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * string * Microsoft.ML.Trainers.ISupportSdcaClassificationLoss * Nullable<single> * Nullable<single> * Nullable<int> -> Microsoft.ML.Trainers.SdcaNonCalibratedBinaryTrainer
<Extension()>
Public Function SdcaNonCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional lossFunction As ISupportSdcaClassificationLoss = Nothing, Optional l2Regularization As Nullable(Of Single) = Nothing, Optional l1Regularization As Nullable(Of Single) = Nothing, Optional maximumNumberOfIterations As Nullable(Of Integer) = Nothing) As SdcaNonCalibratedBinaryTrainer

Parameters

catalog
BinaryClassificationCatalog.BinaryClassificationTrainers

The binary classification catalog trainer object.

labelColumnName
String

The name of the label column. The column data must be Boolean.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

exampleWeightColumnName
String

The name of the example weight column (optional).

lossFunction
ISupportSdcaClassificationLoss

The loss function minimized in the training process. Defaults to LogLoss if not specified.

l2Regularization
Nullable<Single>

The L2 weight for regularization.

l1Regularization
Nullable<Single>

The L1 regularization hyperparameter. Higher values will tend to result in more sparse models.

maximumNumberOfIterations
Nullable<Int32>

The maximum number of passes to perform over the data.
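
As a quick illustration of these parameters, a minimal sketch follows; the optional arguments can be passed by name, and anything omitted falls back to its default. The hinge loss, regularization weights, and iteration count below are illustrative values rather than recommended settings, and the sketch assumes an MLContext and training data prepared as in the example further down (HingeLoss additionally requires a using Microsoft.ML.Trainers; directive).

// A minimal sketch, not a recommendation: supply the optional hyperparameters
// as named arguments. The values below are illustrative only.
var trainerWithArguments = mlContext.BinaryClassification.Trainers.SdcaNonCalibrated(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new HingeLoss(),       // any ISupportSdcaClassificationLoss implementation
    l2Regularization: 0.1f,
    l1Regularization: 0.01f,
    maximumNumberOfIterations: 30);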

Returns

SdcaNonCalibratedBinaryTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class SdcaNonCalibrated
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // ML.NET doesn't cache data set by default. Therefore, if one reads a
            // data set from a file and accesses it many times, it can be slow due
            // to expensive featurization and disk operations. When the considered
            // data can fit into memory, a solution is to cache the data in memory.
            // Caching is especially helpful when working with iterative algorithms 
            // which need many data passes.
            trainingData = mlContext.Data.Cache(trainingData);

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .SdcaNonCalibrated();

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: True
            //   Label: False, Prediction: True
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: True

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.65
            //   AUC: 0.69
            //   F1 Score: 0.64
            //   Negative Precision: 0.68
            //   Negative Recall: 0.65
            //   Positive Precision: 0.63
            //   Positive Recall: 0.66
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      154 |       84 | 0.6471
            //    negative ||       95 |      167 | 0.6374
            //             ||======================
            //   Precision ||   0.6185 |   0.6653 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to

SdcaNonCalibrated(MulticlassClassificationCatalog+MulticlassClassificationTrainers, String, String, String, ISupportSdcaClassificationLoss, Nullable<Single>, Nullable<Single>, Nullable<Int32>)

Create SdcaNonCalibratedMulticlassTrainer, which predicts a target using a linear multiclass classification model trained with a coordinate descent method.

public static Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer SdcaNonCalibrated (this Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, Microsoft.ML.Trainers.ISupportSdcaClassificationLoss lossFunction = default, float? l2Regularization = default, float? l1Regularization = default, int? maximumNumberOfIterations = default);
static member SdcaNonCalibrated : Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers * string * string * string * Microsoft.ML.Trainers.ISupportSdcaClassificationLoss * Nullable<single> * Nullable<single> * Nullable<int> -> Microsoft.ML.Trainers.SdcaNonCalibratedMulticlassTrainer
<Extension()>
Public Function SdcaNonCalibrated (catalog As MulticlassClassificationCatalog.MulticlassClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional lossFunction As ISupportSdcaClassificationLoss = Nothing, Optional l2Regularization As Nullable(Of Single) = Nothing, Optional l1Regularization As Nullable(Of Single) = Nothing, Optional maximumNumberOfIterations As Nullable(Of Integer) = Nothing) As SdcaNonCalibratedMulticlassTrainer

Parameters

catalog
MulticlassClassificationCatalog.MulticlassClassificationTrainers

The multiclass classification catalog trainer object.

labelColumnName
String

The name of the label column. The column data must be KeyDataViewType.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

exampleWeightColumnName
String

The name of the example weight column (optional).

lossFunction
ISupportSdcaClassificationLoss

The loss function to be minimized. Defaults to LogLoss if not specified.

l2Regularization
Nullable<Single>

The L2 weight for regularization.

l1Regularization
Nullable<Single>

The L1 regularization hyperparameter. Higher values will tend to result in more sparse models.

maximumNumberOfIterations
Nullable<Int32>

The maximum number of passes to perform over the data.
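
The multiclass overload accepts the same optional arguments; a minimal sketch is shown below. Note that the label column must already be a key type (for example, produced by MapValueToKey as in the example further down). The hyperparameter values are illustrative only, and HingeLoss requires a using Microsoft.ML.Trainers; directive.

// A minimal sketch, not a recommendation: the label column is assumed to have
// been converted to a key type (e.g. via MapValueToKey). Values are illustrative.
var trainerWithArguments = mlContext.MulticlassClassification.Trainers.SdcaNonCalibrated(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new HingeLoss(),       // any ISupportSdcaClassificationLoss implementation
    l2Regularization: 0.1f,
    l1Regularization: 0.01f,
    maximumNumberOfIterations: 30);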

Returns

SdcaNonCalibratedMulticlassTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.MulticlassClassification
{
    public static class SdcaNonCalibrated
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // ML.NET doesn't cache data set by default. Therefore, if one reads a
            // data set from a file and accesses it many times, it can be slow due
            // to expensive featurization and disk operations. When the considered
            // data can fit into memory, a solution is to cache the data in memory.
            // Caching is especially helpful when working with iterative algorithms 
            // which need many data passes.
            trainingData = mlContext.Data.Cache(trainingData);

            // Define the trainer.
            var pipeline =
                // Convert the string labels into key types.
                mlContext.Transforms.Conversion
                .MapValueToKey(nameof(DataPoint.Label))
                // Apply SdcaNonCalibrated multiclass trainer.
                .Append(mlContext.MulticlassClassification.Trainers
                .SdcaNonCalibrated());

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Look at 5 predictions
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, " +
                    $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: 1, Prediction: 1
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 2
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 3

            // Evaluate the overall metrics
            var metrics = mlContext.MulticlassClassification
                .Evaluate(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Micro Accuracy: 0.91
            //   Macro Accuracy: 0.91
            //   Log Loss: 0.57
            //   Log Loss Reduction: 0.48

            //   Confusion table
            //             ||========================
            //   PREDICTED ||     0 |     1 |     2 | Recall
            //   TRUTH     ||========================
            //           0 ||   147 |     0 |    13 | 0.9188
            //           1 ||     0 |   165 |    12 | 0.9322
            //           2 ||    11 |     8 |   144 | 0.8834
            //             ||========================
            //   Precision ||0.9304 |0.9538 |0.8521 |
        }

        // Generates random uniform doubles in [-0.5, 0.5)
        // range with labels 1, 2 or 3.
        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)(random.NextDouble() - 0.5);
            for (int i = 0; i < count; i++)
            {
                // Generate Labels that are integers 1, 2 or 3
                var label = random.Next(1, 4);
                yield return new DataPoint
                {
                    Label = (uint)label,
                    // Create random features that are correlated with the label.
                    // The feature values are slightly increased by adding a
                    // constant multiple of label.
                    Features = Enumerable.Repeat(label, 20)
                        .Select(x => randomFloat() + label * 0.2f).ToArray()

                };
            }
        }

        // Example with label and 20 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public uint Label { get; set; }
            [VectorType(20)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public uint Label { get; set; }
            // Predicted label from the trainer.
            public uint PredictedLabel { get; set; }
        }

        // Pretty-print MulticlassClassificationMetrics objects.
        public static void PrintMetrics(MulticlassClassificationMetrics metrics)
        {
            Console.WriteLine($"Micro Accuracy: {metrics.MicroAccuracy:F2}");
            Console.WriteLine($"Macro Accuracy: {metrics.MacroAccuracy:F2}");
            Console.WriteLine($"Log Loss: {metrics.LogLoss:F2}");
            Console.WriteLine(
                $"Log Loss Reduction: {metrics.LogLossReduction:F2}\n");

            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to