From 6f51711a58392e64846d12e855ad4dca87f49b3b Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Fri, 8 Jun 2018 14:33:11 -0700 Subject: [PATCH 01/11] fix namespace issue. --- .../DataLoadSave/PartitionedPathParser.cs | 2 +- .../TransformInference.cs | 5 +- src/Microsoft.ML/CSharpApi.cs | 942 +++++++++--------- .../Internal/Tools/CSharpApiGenerator.cs | 87 +- .../UnitTests/TestCSharpApi.cs | 6 +- .../Microsoft.ML.TestFramework/ModelHelper.cs | 42 +- test/Microsoft.ML.Tests/OnnxTests.cs | 4 +- .../Scenarios/SentimentPredictionTests.cs | 8 +- 8 files changed, 548 insertions(+), 548 deletions(-) diff --git a/src/Microsoft.ML.Data/DataLoadSave/PartitionedPathParser.cs b/src/Microsoft.ML.Data/DataLoadSave/PartitionedPathParser.cs index ca3aa075ab..70d8f898ab 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/PartitionedPathParser.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/PartitionedPathParser.cs @@ -76,7 +76,7 @@ public class Arguments : IPartitionedPathParserFactory { [Argument(ArgumentType.Multiple, HelpText = "Column definitions used to override the Partitioned Path Parser. Expected with the format name:type:numeric-source, e.g. col=MyFeature:R4:1", ShortName = "col", SortOrder = 1)] - public Microsoft.ML.Runtime.Data.PartitionedFileLoader.Column[] Columns; + public PartitionedFileLoader.Column[] Columns; [Argument(ArgumentType.AtMostOnce, HelpText = "Data type of each column.")] public DataKind Type = DataKind.Text; diff --git a/src/Microsoft.ML.PipelineInference/TransformInference.cs b/src/Microsoft.ML.PipelineInference/TransformInference.cs index 988b56eedf..6390139030 100644 --- a/src/Microsoft.ML.PipelineInference/TransformInference.cs +++ b/src/Microsoft.ML.PipelineInference/TransformInference.cs @@ -6,7 +6,6 @@ using System.Collections.Generic; using System.Linq; using System.Text; -using Microsoft.ML; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; @@ -712,7 +711,7 @@ public override IEnumerable Apply(IntermediateColumn[] colum { Name = columnNameQuoted.ToString(), Source = columnNameQuoted.ToString(), - ResultType = ML.Transforms.DataKind.R4 + ResultType = ML.Data.DataKind.R4 }); } @@ -721,7 +720,7 @@ public override IEnumerable Apply(IntermediateColumn[] colum ch.Info("Suggested conversion to numeric for boolean features."); var args = new SubComponent("Convert", new[] { $"{columnArgument}type=R4" }); - var epInput = new ML.Transforms.ColumnTypeConverter { Column = epColumns.ToArray(), ResultType = ML.Transforms.DataKind.R4 }; + var epInput = new ML.Transforms.ColumnTypeConverter { Column = epColumns.ToArray(), ResultType = ML.Data.DataKind.R4 }; ColumnRoutingStructure.AnnotatedName[] columnsSource = epColumns.Select(c => new ColumnRoutingStructure.AnnotatedName { IsNumeric = false, Name = c.Name }).ToArray(); ColumnRoutingStructure.AnnotatedName[] columnsDest = diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs index 4d0809350b..f1dd8a051c 100644 --- a/src/Microsoft.ML/CSharpApi.cs +++ b/src/Microsoft.ML/CSharpApi.cs @@ -1473,57 +1473,84 @@ public sealed class Output namespace Data { - public sealed partial class TextLoaderArguments + public enum DataKind : byte { - /// - /// Use separate parsing threads? 
- /// - public bool UseThreads { get; set; } = true; + I1 = 1, + U1 = 2, + I2 = 3, + U2 = 4, + I4 = 5, + U4 = 6, + I8 = 7, + U8 = 8, + R4 = 9, + Num = 9, + R8 = 10, + TX = 11, + Text = 11, + TXT = 11, + BL = 12, + Bool = 12, + TimeSpan = 13, + TS = 13, + DT = 14, + DateTime = 14, + DZ = 15, + DateTimeZone = 15, + UG = 16, + U16 = 16 + } + public sealed partial class TextLoaderRange + { /// - /// File containing a header with feature names. If specified, header defined in the data file (header+) is ignored. + /// First index in the range /// - public string HeaderFile { get; set; } + public int Min { get; set; } /// - /// Maximum number of rows to produce + /// Last index in the range /// - public long? MaxRows { get; set; } + public int? Max { get; set; } /// - /// Whether the input may include quoted values, which can contain separator characters, colons, and distinguish empty values from missing values. When true, consecutive separators denote a missing value and an empty value is denoted by "". When false, consecutive separators denote an empty value. + /// This range extends to the end of the line, but should be a fixed number of items /// - public bool AllowQuoting { get; set; } = true; + public bool AutoEnd { get; set; } = false; /// - /// Whether the input may include sparse representations + /// This range extends to the end of the line, which can vary from line to line /// - public bool AllowSparse { get; set; } = true; + public bool VariableEnd { get; set; } = false; /// - /// Number of source columns in the text data. Default is that sparse rows contain their size information. + /// This range includes only other indices not specified /// - public int? InputSize { get; set; } + public bool AllOther { get; set; } = false; /// - /// Source column separator. + /// Force scalar columns to be treated as vectors of length one /// - public char[] Separator { get; set; } = { '\t' }; + public bool ForceVector { get; set; } = false; + + } + public sealed partial class KeyRange + { /// - /// Column groups. Each group is specified as name:type:numeric-ranges, eg, col=Features:R4:1-17,26,35-40 + /// First index in the range /// - public TextLoaderColumn[] Column { get; set; } + public ulong Min { get; set; } = 0; /// - /// Remove trailing whitespace from lines + /// Last index in the range /// - public bool TrimWhitespace { get; set; } = false; + public ulong? Max { get; set; } /// - /// Data file has header with feature names. Header is read only if options 'hs' and 'hf' are not specified. + /// Whether the key is contiguous /// - public bool HasHeader { get; set; } = false; + public bool Contiguous { get; set; } = true; } @@ -1551,56 +1578,57 @@ public sealed partial class TextLoaderColumn } - public sealed partial class TextLoaderRange + public sealed partial class TextLoaderArguments { /// - /// First index in the range + /// Use separate parsing threads? /// - public int Min { get; set; } + public bool UseThreads { get; set; } = true; /// - /// Last index in the range + /// File containing a header with feature names. If specified, header defined in the data file (header+) is ignored. /// - public int? Max { get; set; } + public string HeaderFile { get; set; } /// - /// This range extends to the end of the line, but should be a fixed number of items + /// Maximum number of rows to produce /// - public bool AutoEnd { get; set; } = false; + public long? 
MaxRows { get; set; } /// - /// This range extends to the end of the line, which can vary from line to line + /// Whether the input may include quoted values, which can contain separator characters, colons, and distinguish empty values from missing values. When true, consecutive separators denote a missing value and an empty value is denoted by "". When false, consecutive separators denote an empty value. /// - public bool VariableEnd { get; set; } = false; + public bool AllowQuoting { get; set; } = true; /// - /// This range includes only other indices not specified + /// Whether the input may include sparse representations /// - public bool AllOther { get; set; } = false; + public bool AllowSparse { get; set; } = true; /// - /// Force scalar columns to be treated as vectors of length one + /// Number of source columns in the text data. Default is that sparse rows contain their size information. /// - public bool ForceVector { get; set; } = false; + public int? InputSize { get; set; } - } + /// + /// Source column separator. + /// + public char[] Separator { get; set; } = { '\t' }; - public sealed partial class KeyRange - { /// - /// First index in the range + /// Column groups. Each group is specified as name:type:numeric-ranges, eg, col=Features:R4:1-17,26,35-40 /// - public ulong Min { get; set; } = 0; + public TextLoaderColumn[] Column { get; set; } /// - /// Last index in the range + /// Remove trailing whitespace from lines /// - public ulong? Max { get; set; } + public bool TrimWhitespace { get; set; } = false; /// - /// Whether the key is contiguous + /// Data file has header with feature names. Header is read only if options 'hs' and 'hf' are not specified. /// - public bool Contiguous { get; set; } = true; + public bool HasHeader { get; set; } = false; } @@ -1616,25 +1644,25 @@ public TextLoader(string filePath) { _inputFilePath = filePath; } - + public void SetInput(IHostEnvironment env, Experiment experiment) { IFileHandle inputFile = new SimpleFileHandle(env, _inputFilePath, false, false); experiment.SetInput(InputFile, inputFile); } - + public Var GetInputData() => null; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { Contracts.Assert(previousStep == null); - + return new TextLoaderPipelineStep(experiment.Add(this)); } - + private class TextLoaderPipelineStep : ILearningPipelineDataStep { - public TextLoaderPipelineStep (Output output) + public TextLoaderPipelineStep(Output output) { Data = output.Data; Model = null; @@ -1652,7 +1680,7 @@ public TextLoaderPipelineStep (Output output) /// /// Arguments /// - public Data.TextLoaderArguments Arguments { get; set; } = new Data.TextLoaderArguments(); + public TextLoaderArguments Arguments { get; set; } = new TextLoaderArguments(); public sealed class Output @@ -1918,12 +1946,12 @@ public sealed partial class BinaryCrossValidator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphInput(); + public CrossValidationBinaryMacroSubGraphInput Inputs { get; set; } = new CrossValidationBinaryMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphOutput(); + public CrossValidationBinaryMacroSubGraphOutput Outputs { get; set; } = new CrossValidationBinaryMacroSubGraphOutput(); /// /// 
Column to use for stratification @@ -2190,7 +2218,7 @@ public sealed partial class CrossValidationResultsCombiner /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Models.MacroUtilsTrainerKinds Kind { get; set; } = Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; public sealed class Output @@ -2270,12 +2298,12 @@ public sealed partial class CrossValidator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.CrossValidationMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.CrossValidationMacroSubGraphInput(); + public CrossValidationMacroSubGraphInput Inputs { get; set; } = new CrossValidationMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.CrossValidationMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.CrossValidationMacroSubGraphOutput(); + public CrossValidationMacroSubGraphOutput Outputs { get; set; } = new CrossValidationMacroSubGraphOutput(); /// /// Column to use for stratification @@ -2290,7 +2318,7 @@ public sealed partial class CrossValidator /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds Kind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; /// /// Column to use for labels @@ -2416,7 +2444,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2491,7 +2519,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2628,7 +2656,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2699,7 +2727,7 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// The training subgraph output. 
/// - public Microsoft.ML.Models.OneVersusAllMacroSubGraphOutput OutputForSubGraph { get; set; } = new Microsoft.ML.Models.OneVersusAllMacroSubGraphOutput(); + public OneVersusAllMacroSubGraphOutput OutputForSubGraph { get; set; } = new OneVersusAllMacroSubGraphOutput(); /// /// Use probabilities in OVA combiner @@ -2729,12 +2757,12 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public NormalizeOption NormalizeFeatures { get; set; } = NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public CachingOptions Caching { get; set; } = CachingOptions.Auto; public sealed class Output @@ -2746,7 +2774,7 @@ public sealed class Output } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2874,12 +2902,12 @@ public sealed partial class OvaModelCombiner : Microsoft.ML.Runtime.EntryPoints. /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public NormalizeOption NormalizeFeatures { get; set; } = NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public CachingOptions Caching { get; set; } = CachingOptions.Auto; public sealed class Output @@ -2891,7 +2919,7 @@ public sealed class Output } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2955,7 +2983,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3077,7 +3105,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3433,12 +3461,12 @@ public sealed partial class TrainTestBinaryEvaluator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.TrainTestBinaryMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.TrainTestBinaryMacroSubGraphInput(); + public TrainTestBinaryMacroSubGraphInput Inputs { get; set; } = new TrainTestBinaryMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.TrainTestBinaryMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.TrainTestBinaryMacroSubGraphOutput(); + public TrainTestBinaryMacroSubGraphOutput Outputs { get; set; } = new TrainTestBinaryMacroSubGraphOutput(); public sealed class Output @@ -3528,17 +3556,17 @@ public sealed partial class TrainTestEvaluator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.TrainTestMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.TrainTestMacroSubGraphInput(); + public TrainTestMacroSubGraphInput Inputs { get; 
set; } = new TrainTestMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.TrainTestMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.TrainTestMacroSubGraphOutput(); + public TrainTestMacroSubGraphOutput Outputs { get; set; } = new TrainTestMacroSubGraphOutput(); /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds Kind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; /// /// Identifies which pipeline was run for this train test. @@ -3652,13 +3680,13 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[]{0.01f, 0.1f, 0.5f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[] { 0.01f, 0.1f, 0.5f, 1f })] public float LearningRate { get; set; } = 1f; /// /// Decrease learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[] { false, true })] public bool DecreaseLearningRate { get; set; } = false; /// @@ -3700,7 +3728,7 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] public int NumIterations { get; set; } = 1; /// @@ -3711,13 +3739,13 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -3760,7 +3788,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3900,7 +3928,7 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -3940,19 +3968,19 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -4060,7 +4088,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4182,7 +4210,7 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -4222,19 +4250,19 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -4342,7 +4370,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4415,7 +4443,7 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -4451,19 +4479,19 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -4580,7 +4608,7 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -4620,19 +4648,19 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -4740,7 +4768,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4841,7 +4869,7 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -4877,19 +4905,19 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -5006,7 +5034,7 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -5046,19 +5074,19 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -5166,7 +5194,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IRan } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -5227,7 +5255,7 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -5263,19 +5291,19 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -5392,7 +5420,7 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -5432,19 +5460,19 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -5552,7 +5580,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -5618,7 +5646,7 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -5654,19 +5682,19 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -5783,7 +5811,7 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -5823,19 +5851,19 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -5943,7 +5971,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6010,7 +6038,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Total number of iterations over all features /// - [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[]{200, 1500, 9500})] + [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[] { 200, 1500, 9500 })] public int NumIterations { get; set; } = 9500; /// @@ -6021,7 +6049,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale: true)] public double LearningRates { get; set; } = 0.002d; /// @@ -6052,7 +6080,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Minimum number of training instances required to form a partition /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[] { 1, 10, 50 })] public int MinDocuments { get; set; } = 10; /// @@ -6100,7 +6128,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6151,7 +6179,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Total number of iterations over all features /// - [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[]{200, 1500, 9500})] + [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[] { 200, 1500, 9500 })] public int NumIterations { get; set; } = 9500; /// @@ -6162,7 +6190,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// The learning rate /// - 
[TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale: true)] public double LearningRates { get; set; } = 0.002d; /// @@ -6193,7 +6221,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Minimum number of training instances required to form a partition /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[] { 1, 10, 50 })] public int MinDocuments { get; set; } = 10; /// @@ -6241,7 +6269,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6289,13 +6317,13 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// The number of clusters /// - [TlcModule.SweepableDiscreteParamAttribute("K", new object[]{5, 10, 20, 40})] + [TlcModule.SweepableDiscreteParamAttribute("K", new object[] { 5, 10, 20, 40 })] public int K { get; set; } = 5; /// /// Cluster initialization algorithm /// - public Trainers.KMeansPlusPlusTrainerInitAlgorithm InitAlgorithm { get; set; } = Trainers.KMeansPlusPlusTrainerInitAlgorithm.KMeansParallel; + public KMeansPlusPlusTrainerInitAlgorithm InitAlgorithm { get; set; } = KMeansPlusPlusTrainerInitAlgorithm.KMeansParallel; /// /// Tolerance parameter for trainer convergence. Lower = slower, more accurate @@ -6335,12 +6363,12 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// Normalize option for the feature column /// - public Models.NormalizeOption NormalizeFeatures { get; set; } = Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Models.CachingOptions Caching { get; set; } = Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IClusteringOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput @@ -6352,7 +6380,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IClu } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6393,7 +6421,7 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Regularizer constant /// - [TlcModule.SweepableFloatParamAttribute("Lambda", 1E-05f, 0.1f, stepSize:10, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Lambda", 1E-05f, 0.1f, stepSize: 10, isLogScale: true)] public float Lambda { get; set; } = 0.001f; /// @@ -6404,13 +6432,13 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Perform projection to unit-ball? Typically used with batch size > 1. 
/// - [TlcModule.SweepableDiscreteParamAttribute("PerformProjection", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("PerformProjection", new object[] { false, true })] public bool PerformProjection { get; set; } = false; /// /// No bias /// - [TlcModule.SweepableDiscreteParamAttribute("NoBias", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("NoBias", new object[] { false, true })] public bool NoBias { get; set; } = false; /// @@ -6427,7 +6455,7 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] public int NumIterations { get; set; } = 1; /// @@ -6438,13 +6466,13 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -6487,7 +6515,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6533,25 +6561,25 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. 
Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] public int MemorySize { get; set; } = 20; /// @@ -6573,7 +6601,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// @@ -6589,7 +6617,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] public bool DenseOptimizer { get; set; } = false; /// @@ -6620,12 +6648,12 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Normalize option for the feature column /// - public Models.NormalizeOption NormalizeFeatures { get; set; } = Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Models.CachingOptions Caching { get; set; } = Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBinaryClassificationOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput @@ -6637,7 +6665,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6683,25 +6711,25 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] public int MemorySize { get; set; } = 20; /// @@ -6723,7 +6751,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. 
/// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// @@ -6739,7 +6767,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] public bool DenseOptimizer { get; set; } = false; /// @@ -6787,7 +6815,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6860,7 +6888,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6907,13 +6935,13 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[]{0.01f, 0.1f, 0.5f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[] { 0.01f, 0.1f, 0.5f, 1f })] public float LearningRate { get; set; } = 0.1f; /// /// Decrease learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[] { false, true })] public bool DecreaseLearningRate { get; set; } = true; /// @@ -6955,7 +6983,7 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] public int NumIterations { get; set; } = 1; /// @@ -6966,13 +6994,13 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -7015,7 +7043,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7056,7 +7084,7 @@ public sealed partial class OrdinaryLeastSquaresRegressor : Microsoft.ML.Runtime /// /// L2 regularization weight /// - [TlcModule.SweepableDiscreteParamAttribute("L2Weight", new object[]{1E-06f, 0.1f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("L2Weight", new object[] { 1E-06f, 0.1f, 1f })] public float L2Weight { get; set; } = 1E-06f; /// @@ -7104,7 
+7132,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7145,19 +7173,19 @@ public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoint /// /// The number of components in the PCA /// - [TlcModule.SweepableDiscreteParamAttribute("Rank", new object[]{10, 20, 40, 80})] + [TlcModule.SweepableDiscreteParamAttribute("Rank", new object[] { 10, 20, 40, 80 })] public int Rank { get; set; } = 20; /// /// Oversampling parameter for randomized PCA training /// - [TlcModule.SweepableDiscreteParamAttribute("Oversampling", new object[]{10, 20, 40})] + [TlcModule.SweepableDiscreteParamAttribute("Oversampling", new object[] { 10, 20, 40 })] public int Oversampling { get; set; } = 20; /// /// If enabled, data is centered to be zero mean /// - [TlcModule.SweepableDiscreteParamAttribute("Center", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Center", new object[] { false, true })] public bool Center { get; set; } = true; /// @@ -7183,12 +7211,12 @@ public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoint /// /// Normalize option for the feature column /// - public Models.NormalizeOption NormalizeFeatures { get; set; } = Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Models.CachingOptions Caching { get; set; } = Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IAnomalyDetectionOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput @@ -7200,7 +7228,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IAno } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7241,25 +7269,25 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] public int MemorySize { get; set; } = 20; /// @@ -7281,7 +7309,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. 
/// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] public float InitWtsDiameter { get; set; } /// @@ -7297,7 +7325,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. /// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] public bool DenseOptimizer { get; set; } = false; /// @@ -7345,7 +7373,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7408,13 +7436,13 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] public float? L1Threshold { get; set; } /// @@ -7425,19 +7453,19 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] public float ConvergenceTolerance { get; set; } = 0.1f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -7448,7 +7476,7 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// The learning rate for adjusting bias from being regularized. 
/// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] public float BiasLearningRate { get; set; } /// @@ -7486,7 +7514,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7533,13 +7561,13 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] public float? L1Threshold { get; set; } /// @@ -7550,19 +7578,19 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] public float ConvergenceTolerance { get; set; } = 0.1f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -7573,7 +7601,7 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// The learning rate for adjusting bias from being regularized. /// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] public float BiasLearningRate { get; set; } /// @@ -7611,7 +7639,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7658,13 +7686,13 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. 
/// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] public float? L1Threshold { get; set; } /// @@ -7675,19 +7703,19 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] public float ConvergenceTolerance { get; set; } = 0.01f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -7698,7 +7726,7 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// The learning rate for adjusting bias from being regularized. /// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] public float BiasLearningRate { get; set; } = 1f; /// @@ -7736,7 +7764,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7783,7 +7811,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// L2 regularizer constant /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{1E-07f, 5E-07f, 1E-06f, 5E-06f, 1E-05f})] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { 1E-07f, 5E-07f, 1E-06f, 5E-06f, 1E-05f })] public float L2Const { get; set; } = 1E-06f; /// @@ -7794,13 +7822,13 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Exponential moving averaged improvement tolerance for convergence /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.01f, 0.001f, 0.0001f, 1E-05f})] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.01f, 0.001f, 0.0001f, 1E-05f })] public double ConvergenceTolerance { get; set; } = 0.0001d; /// /// Maximum number of iterations; set to 1 to simulate online learning. 
/// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{1, 5, 10, 20})] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { 1, 5, 10, 20 })] public int MaxIterations { get; set; } = 20; /// @@ -7811,7 +7839,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] public bool Shuffle { get; set; } = true; /// @@ -7875,7 +7903,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7953,7 +7981,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8018,7 +8046,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8089,7 +8117,7 @@ public sealed partial class BinNormalizer : Microsoft.ML.Runtime.EntryPoints.Com public BinNormalizer() { } - + public BinNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -8100,7 +8128,7 @@ public BinNormalizer(params string[] inputColumns) } } } - + public BinNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8111,7 +8139,7 @@ public BinNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8130,7 +8158,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformBinColumn[] Column { get; set; } + public NormalizeTransformBinColumn[] Column { get; set; } /// /// Max number of bins, power of 2 recommended @@ -8167,7 +8195,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8233,7 +8261,7 @@ public sealed partial class CategoricalHashTransformColumn : OneToOneColumn /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind? OutputKind { get; set; } + public CategoricalTransformOutputKind? 
OutputKind { get; set; } /// /// Name of the new column @@ -8256,7 +8284,7 @@ public sealed partial class CategoricalHashOneHotVectorizer : Microsoft.ML.Runti public CategoricalHashOneHotVectorizer() { } - + public CategoricalHashOneHotVectorizer(params string[] inputColumns) { if (inputColumns != null) @@ -8267,7 +8295,7 @@ public CategoricalHashOneHotVectorizer(params string[] inputColumns) } } } - + public CategoricalHashOneHotVectorizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8278,7 +8306,7 @@ public CategoricalHashOneHotVectorizer(params ValueTuple[] input } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8297,7 +8325,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:hashBits:src) /// - public Microsoft.ML.Transforms.CategoricalHashTransformColumn[] Column { get; set; } + public CategoricalHashTransformColumn[] Column { get; set; } /// /// Number of bits to hash into. Must be between 1 and 30, inclusive. @@ -8322,7 +8350,7 @@ public void AddColumn(string name, string source) /// /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind OutputKind { get; set; } = Microsoft.ML.Transforms.CategoricalTransformOutputKind.Bag; + public CategoricalTransformOutputKind OutputKind { get; set; } = CategoricalTransformOutputKind.Bag; /// /// Input dataset @@ -8344,7 +8372,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8388,7 +8416,7 @@ public sealed partial class CategoricalTransformColumn : OneToOneColumn /// Output kind: Bag (multi-set vector), Ind (indicator vector), Key (index), or Binary encoded indicator vector /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind? OutputKind { get; set; } + public CategoricalTransformOutputKind? OutputKind { get; set; } /// /// Maximum number of terms to keep when auto-training @@ -8403,7 +8431,7 @@ public sealed partial class CategoricalTransformColumn : OneToOneColumn /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder? Sort { get; set; } + public TermTransformSortOrder? Sort { get; set; } /// /// Whether key value metadata should be text, regardless of the actual input type @@ -8431,7 +8459,7 @@ public sealed partial class CategoricalOneHotVectorizer : Microsoft.ML.Runtime.E public CategoricalOneHotVectorizer() { } - + public CategoricalOneHotVectorizer(params string[] inputColumns) { if (inputColumns != null) @@ -8442,7 +8470,7 @@ public CategoricalOneHotVectorizer(params string[] inputColumns) } } } - + public CategoricalOneHotVectorizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8453,7 +8481,7 @@ public CategoricalOneHotVectorizer(params ValueTuple[] inputOutp } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -8472,12 +8500,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CategoricalTransformColumn[] Column { get; set; } + public CategoricalTransformColumn[] Column { get; set; } /// /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind OutputKind { get; set; } = Microsoft.ML.Transforms.CategoricalTransformOutputKind.Ind; + public CategoricalTransformOutputKind OutputKind { get; set; } = CategoricalTransformOutputKind.Ind; /// /// Maximum number of terms to keep per column when auto-training @@ -8492,7 +8520,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -8519,7 +8547,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8575,7 +8603,7 @@ public sealed partial class CharacterTokenizer : Microsoft.ML.Runtime.EntryPoint public CharacterTokenizer() { } - + public CharacterTokenizer(params string[] inputColumns) { if (inputColumns != null) @@ -8586,7 +8614,7 @@ public CharacterTokenizer(params string[] inputColumns) } } } - + public CharacterTokenizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8597,7 +8625,7 @@ public CharacterTokenizer(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8616,7 +8644,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CharTokenizeTransformColumn[] Column { get; set; } + public CharTokenizeTransformColumn[] Column { get; set; } /// /// Whether to mark the beginning/end of each row/slot with start of text character (0x02)/end of text character (0x03) @@ -8643,7 +8671,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8699,12 +8727,12 @@ public sealed partial class ColumnConcatenator : Microsoft.ML.Runtime.EntryPoint public ColumnConcatenator() { } - + public ColumnConcatenator(string outputColumn, params string[] inputColumns) { AddColumn(outputColumn, inputColumns); } - + public void AddColumn(string name, params string[] source) { var list = Column == null ? 
new List() : new List(Column); @@ -8716,7 +8744,7 @@ public void AddColumn(string name, params string[] source) /// /// New column definition(s) (optional form: name:srcs) /// - public Microsoft.ML.Transforms.ConcatTransformColumn[] Column { get; set; } + public ConcatTransformColumn[] Column { get; set; } /// /// Input dataset @@ -8738,7 +8766,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8794,7 +8822,7 @@ public sealed partial class ColumnCopier : Microsoft.ML.Runtime.EntryPoints.Comm public ColumnCopier() { } - + public ColumnCopier(params string[] inputColumns) { if (inputColumns != null) @@ -8805,7 +8833,7 @@ public ColumnCopier(params string[] inputColumns) } } } - + public ColumnCopier(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8816,7 +8844,7 @@ public ColumnCopier(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8835,7 +8863,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CopyColumnsTransformColumn[] Column { get; set; } + public CopyColumnsTransformColumn[] Column { get; set; } /// /// Input dataset @@ -8857,7 +8885,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8922,7 +8950,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8987,7 +9015,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9019,41 +9047,13 @@ public ColumnSelectorPipelineStep(Output output) namespace Transforms { - public enum DataKind : byte - { - I1 = 1, - U1 = 2, - I2 = 3, - U2 = 4, - I4 = 5, - U4 = 6, - I8 = 7, - U8 = 8, - R4 = 9, - Num = 9, - R8 = 10, - TX = 11, - Text = 11, - TXT = 11, - BL = 12, - Bool = 12, - TimeSpan = 13, - TS = 13, - DT = 14, - DateTime = 14, - DZ = 15, - DateTimeZone = 15, - UG = 16, - U16 = 16 - } - public sealed partial class ConvertTransformColumn : OneToOneColumn, IOneToOneColumn { /// /// The result type /// - public Microsoft.ML.Transforms.DataKind? ResultType { get; set; } + public Microsoft.ML.Data.DataKind? 
ResultType { get; set; } /// /// For a key column, this defines the range of values @@ -9081,7 +9081,7 @@ public sealed partial class ColumnTypeConverter : Microsoft.ML.Runtime.EntryPoin public ColumnTypeConverter() { } - + public ColumnTypeConverter(params string[] inputColumns) { if (inputColumns != null) @@ -9092,7 +9092,7 @@ public ColumnTypeConverter(params string[] inputColumns) } } } - + public ColumnTypeConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9103,7 +9103,7 @@ public ColumnTypeConverter(params ValueTuple[] inputOutputColumn } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -9122,12 +9122,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:type:src) /// - public Microsoft.ML.Transforms.ConvertTransformColumn[] Column { get; set; } + public ConvertTransformColumn[] Column { get; set; } /// /// The result type /// - public Microsoft.ML.Transforms.DataKind? ResultType { get; set; } + public Microsoft.ML.Data.DataKind? ResultType { get; set; } /// /// For a key column, this defines the range of values @@ -9154,7 +9154,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9224,7 +9224,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9290,7 +9290,7 @@ public sealed partial class ConditionalNormalizer : Microsoft.ML.Runtime.EntryPo public ConditionalNormalizer() { } - + public ConditionalNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -9301,7 +9301,7 @@ public ConditionalNormalizer(params string[] inputColumns) } } } - + public ConditionalNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9312,7 +9312,7 @@ public ConditionalNormalizer(params ValueTuple[] inputOutputColu } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -9331,7 +9331,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -9363,7 +9363,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9412,7 +9412,7 @@ public sealed partial class DataCache : Microsoft.ML.Runtime.EntryPoints.CommonI /// /// Caching strategy /// - public Microsoft.ML.Transforms.CacheCachingType Caching { get; set; } = Microsoft.ML.Transforms.CacheCachingType.Memory; + public CacheCachingType Caching { get; set; } = CacheCachingType.Memory; /// /// Input dataset @@ -9429,7 +9429,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9555,7 +9555,7 @@ public sealed partial class TermTransformColumn : OneToOneColumn /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder? Sort { get; set; } + public TermTransformSortOrder? Sort { get; set; } /// /// Whether key value metadata should be text, regardless of the actual input type @@ -9583,7 +9583,7 @@ public sealed partial class Dictionarizer : Microsoft.ML.Runtime.EntryPoints.Com public Dictionarizer() { } - + public Dictionarizer(params string[] inputColumns) { if (inputColumns != null) @@ -9594,7 +9594,7 @@ public Dictionarizer(params string[] inputColumns) } } } - + public Dictionarizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9605,7 +9605,7 @@ public Dictionarizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -9624,7 +9624,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.TermTransformColumn[] Column { get; set; } + public TermTransformColumn[] Column { get; set; } /// /// Maximum number of terms to keep per column when auto-training @@ -9639,7 +9639,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). 
/// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -9666,7 +9666,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9731,7 +9731,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9801,7 +9801,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9881,7 +9881,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9952,7 +9952,7 @@ public sealed partial class GlobalContrastNormalizer : Microsoft.ML.Runtime.Entr public GlobalContrastNormalizer() { } - + public GlobalContrastNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -9963,7 +9963,7 @@ public GlobalContrastNormalizer(params string[] inputColumns) } } } - + public GlobalContrastNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9974,7 +9974,7 @@ public GlobalContrastNormalizer(params ValueTuple[] inputOutputC } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -9993,7 +9993,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformGcnColumn[] Column { get; set; } + public LpNormNormalizerTransformGcnColumn[] Column { get; set; } /// /// Subtract mean from each value before normalizing @@ -10030,7 +10030,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10111,7 +10111,7 @@ public sealed partial class HashConverter : Microsoft.ML.Runtime.EntryPoints.Com public HashConverter() { } - + public HashConverter(params string[] inputColumns) { if (inputColumns != null) @@ -10122,7 +10122,7 @@ public HashConverter(params string[] inputColumns) } } } - + public HashConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10133,7 +10133,7 @@ public HashConverter(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -10152,7 +10152,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.HashJoinTransformColumn[] Column { get; set; } + public HashJoinTransformColumn[] Column { get; set; } /// /// Whether the values need to be combined for a single hash @@ -10194,7 +10194,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10250,7 +10250,7 @@ public sealed partial class KeyToTextConverter : Microsoft.ML.Runtime.EntryPoint public KeyToTextConverter() { } - + public KeyToTextConverter(params string[] inputColumns) { if (inputColumns != null) @@ -10261,7 +10261,7 @@ public KeyToTextConverter(params string[] inputColumns) } } } - + public KeyToTextConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10272,7 +10272,7 @@ public KeyToTextConverter(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10291,7 +10291,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.KeyToValueTransformColumn[] Column { get; set; } + public KeyToValueTransformColumn[] Column { get; set; } /// /// Input dataset @@ -10313,7 +10313,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10383,7 +10383,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10444,7 +10444,7 @@ public sealed partial class LabelIndicator : Microsoft.ML.Runtime.EntryPoints.Co public LabelIndicator() { } - + public LabelIndicator(params string[] inputColumns) { if (inputColumns != null) @@ -10455,7 +10455,7 @@ public LabelIndicator(params string[] inputColumns) } } } - + public LabelIndicator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10466,7 +10466,7 @@ public LabelIndicator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10485,7 +10485,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LabelIndicatorTransformColumn[] Column { get; set; } + public LabelIndicatorTransformColumn[] Column { get; set; } /// /// Label of the positive class. 
@@ -10512,7 +10512,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10577,7 +10577,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10638,7 +10638,7 @@ public sealed partial class LogMeanVarianceNormalizer : Microsoft.ML.Runtime.Ent public LogMeanVarianceNormalizer() { } - + public LogMeanVarianceNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10649,7 +10649,7 @@ public LogMeanVarianceNormalizer(params string[] inputColumns) } } } - + public LogMeanVarianceNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10660,7 +10660,7 @@ public LogMeanVarianceNormalizer(params ValueTuple[] inputOutput } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10684,7 +10684,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformLogNormalColumn[] Column { get; set; } + public NormalizeTransformLogNormalColumn[] Column { get; set; } /// /// Max number of examples used to train the normalizer @@ -10711,7 +10711,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10757,7 +10757,7 @@ public sealed partial class LpNormNormalizerTransformColumn : OneToOneColumn /// The norm to use to normalize each sample /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind? NormKind { get; set; } + public LpNormNormalizerTransformNormalizerKind? NormKind { get; set; } /// /// Subtract mean from each value before normalizing @@ -10785,7 +10785,7 @@ public sealed partial class LpNormalizer : Microsoft.ML.Runtime.EntryPoints.Comm public LpNormalizer() { } - + public LpNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10796,7 +10796,7 @@ public LpNormalizer(params string[] inputColumns) } } } - + public LpNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10807,7 +10807,7 @@ public LpNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -10826,12 +10826,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformColumn[] Column { get; set; } + public LpNormNormalizerTransformColumn[] Column { get; set; } /// /// The norm to use to normalize each sample /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind NormKind { get; set; } = Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind.L2Norm; + public LpNormNormalizerTransformNormalizerKind NormKind { get; set; } = LpNormNormalizerTransformNormalizerKind.L2Norm; /// /// Subtract mean from each value before normalizing @@ -10858,7 +10858,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10932,7 +10932,7 @@ public sealed partial class MeanVarianceNormalizer : Microsoft.ML.Runtime.EntryP public MeanVarianceNormalizer() { } - + public MeanVarianceNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10943,7 +10943,7 @@ public MeanVarianceNormalizer(params string[] inputColumns) } } } - + public MeanVarianceNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10954,7 +10954,7 @@ public MeanVarianceNormalizer(params ValueTuple[] inputOutputCol } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10978,7 +10978,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -11010,7 +11010,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11052,7 +11052,7 @@ public sealed partial class MinMaxNormalizer : Microsoft.ML.Runtime.EntryPoints. public MinMaxNormalizer() { } - + public MinMaxNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -11063,7 +11063,7 @@ public MinMaxNormalizer(params string[] inputColumns) } } } - + public MinMaxNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11074,7 +11074,7 @@ public MinMaxNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -11093,7 +11093,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -11125,7 +11125,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11175,7 +11175,7 @@ public sealed partial class NAHandleTransformColumn : OneToOneColumn /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAHandleTransformReplacementKind? Kind { get; set; } + public NAHandleTransformReplacementKind? Kind { get; set; } /// /// Whether to impute values by slot @@ -11208,7 +11208,7 @@ public sealed partial class MissingValueHandler : Microsoft.ML.Runtime.EntryPoin public MissingValueHandler() { } - + public MissingValueHandler(params string[] inputColumns) { if (inputColumns != null) @@ -11219,7 +11219,7 @@ public MissingValueHandler(params string[] inputColumns) } } } - + public MissingValueHandler(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11230,7 +11230,7 @@ public MissingValueHandler(params ValueTuple[] inputOutputColumn } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11249,12 +11249,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:rep:src) /// - public Microsoft.ML.Transforms.NAHandleTransformColumn[] Column { get; set; } + public NAHandleTransformColumn[] Column { get; set; } /// /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAHandleTransformReplacementKind ReplaceWith { get; set; } = Microsoft.ML.Transforms.NAHandleTransformReplacementKind.Def; + public NAHandleTransformReplacementKind ReplaceWith { get; set; } = NAHandleTransformReplacementKind.Def; /// /// Whether to impute values by slot @@ -11286,7 +11286,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11342,7 +11342,7 @@ public sealed partial class MissingValueIndicator : Microsoft.ML.Runtime.EntryPo public MissingValueIndicator() { } - + public MissingValueIndicator(params string[] inputColumns) { if (inputColumns != null) @@ -11353,7 +11353,7 @@ public MissingValueIndicator(params string[] inputColumns) } } } - + public MissingValueIndicator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11364,7 +11364,7 @@ public MissingValueIndicator(params ValueTuple[] inputOutputColu } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -11383,7 +11383,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NAIndicatorTransformColumn[] Column { get; set; } + public NAIndicatorTransformColumn[] Column { get; set; } /// /// Input dataset @@ -11405,7 +11405,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11461,7 +11461,7 @@ public sealed partial class MissingValuesDropper : Microsoft.ML.Runtime.EntryPoi public MissingValuesDropper() { } - + public MissingValuesDropper(params string[] inputColumns) { if (inputColumns != null) @@ -11472,7 +11472,7 @@ public MissingValuesDropper(params string[] inputColumns) } } } - + public MissingValuesDropper(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11483,7 +11483,7 @@ public MissingValuesDropper(params ValueTuple[] inputOutputColum } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11502,7 +11502,7 @@ public void AddColumn(string name, string source) /// /// Columns to drop the NAs for /// - public Microsoft.ML.Transforms.NADropTransformColumn[] Column { get; set; } + public NADropTransformColumn[] Column { get; set; } /// /// Input dataset @@ -11524,7 +11524,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11594,7 +11594,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11652,7 +11652,7 @@ public sealed partial class NAReplaceTransformColumn : OneToOneColumn /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAReplaceTransformReplacementKind? Kind { get; set; } + public NAReplaceTransformReplacementKind? Kind { get; set; } /// /// Whether to impute values by slot @@ -11680,7 +11680,7 @@ public sealed partial class MissingValueSubstitutor : Microsoft.ML.Runtime.Entry public MissingValueSubstitutor() { } - + public MissingValueSubstitutor(params string[] inputColumns) { if (inputColumns != null) @@ -11691,7 +11691,7 @@ public MissingValueSubstitutor(params string[] inputColumns) } } } - + public MissingValueSubstitutor(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11702,7 +11702,7 @@ public MissingValueSubstitutor(params ValueTuple[] inputOutputCo } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -11721,12 +11721,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:rep:src) /// - public Microsoft.ML.Transforms.NAReplaceTransformColumn[] Column { get; set; } + public NAReplaceTransformColumn[] Column { get; set; } /// /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAReplaceTransformReplacementKind ReplacementKind { get; set; } = Microsoft.ML.Transforms.NAReplaceTransformReplacementKind.Def; + public NAReplaceTransformReplacementKind ReplacementKind { get; set; } = NAReplaceTransformReplacementKind.Def; /// /// Whether to impute values by slot @@ -11753,7 +11753,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11845,7 +11845,7 @@ public sealed partial class NgramTransformColumn : OneToOneColumn /// Statistical measure used to evaluate how important a word is to a document in a corpus /// - public Microsoft.ML.Transforms.NgramTransformWeightingCriteria? Weighting { get; set; } + public NgramTransformWeightingCriteria? Weighting { get; set; } /// /// Name of the new column @@ -11868,7 +11868,7 @@ public sealed partial class NGramTranslator : Microsoft.ML.Runtime.EntryPoints.C public NGramTranslator() { } - + public NGramTranslator(params string[] inputColumns) { if (inputColumns != null) @@ -11879,7 +11879,7 @@ public NGramTranslator(params string[] inputColumns) } } } - + public NGramTranslator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11890,7 +11890,7 @@ public NGramTranslator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -11909,7 +11909,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NgramTransformColumn[] Column { get; set; } + public NgramTransformColumn[] Column { get; set; } /// /// Maximum ngram length @@ -11934,7 +11934,7 @@ public void AddColumn(string name, string source) /// /// The weighting criteria /// - public Microsoft.ML.Transforms.NgramTransformWeightingCriteria Weighting { get; set; } = Microsoft.ML.Transforms.NgramTransformWeightingCriteria.Tf; + public NgramTransformWeightingCriteria Weighting { get; set; } = NgramTransformWeightingCriteria.Tf; /// /// Input dataset @@ -11956,7 +11956,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12016,7 +12016,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12081,7 +12081,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12162,7 +12162,7 @@ public sealed partial class PcaCalculator : Microsoft.ML.Runtime.EntryPoints.Com public PcaCalculator() { } - + public PcaCalculator(params string[] inputColumns) { if (inputColumns != null) @@ -12173,7 +12173,7 @@ public PcaCalculator(params string[] inputColumns) } } } - + public PcaCalculator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -12184,18 +12184,18 @@ public PcaCalculator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { - var list = Column == null ? new List() : new List(Column); - list.Add(OneToOneColumn.Create(source)); + var list = Column == null ? new List() : new List(Column); + list.Add(OneToOneColumn.Create(source)); Column = list.ToArray(); } public void AddColumn(string name, string source) { - var list = Column == null ? new List() : new List(Column); - list.Add(OneToOneColumn.Create(name, source)); + var list = Column == null ? 
new List() : new List(Column); + list.Add(OneToOneColumn.Create(name, source)); Column = list.ToArray(); } @@ -12203,7 +12203,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Transforms.PcaTransformColumn[] Column { get; set; } + public PcaTransformColumn[] Column { get; set; } /// /// The name of the weight column @@ -12250,7 +12250,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12315,7 +12315,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12377,7 +12377,7 @@ public sealed partial class RandomNumberGenerator : Microsoft.ML.Runtime.EntryPo /// /// New column definition(s) (optional form: name:seed) /// - public Microsoft.ML.Transforms.GenerateNumberTransformColumn[] Column { get; set; } + public GenerateNumberTransformColumn[] Column { get; set; } /// /// Use an auto-incremented integer starting at zero instead of a random number @@ -12409,7 +12409,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12499,7 +12499,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12569,7 +12569,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12634,7 +12634,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12699,7 +12699,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12764,7 +12764,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12851,7 +12851,7 @@ public sealed partial class Segregator : Microsoft.ML.Runtime.EntryPoints.Common /// /// Specifies how to unroll multiple pivot columns of different size. 
/// - public Microsoft.ML.Transforms.UngroupTransformUngroupMode Mode { get; set; } = Microsoft.ML.Transforms.UngroupTransformUngroupMode.Inner; + public UngroupTransformUngroupMode Mode { get; set; } = UngroupTransformUngroupMode.Inner; /// /// Input dataset @@ -12873,7 +12873,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12943,7 +12943,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12985,7 +12985,7 @@ public sealed partial class SupervisedBinNormalizer : Microsoft.ML.Runtime.Entry public SupervisedBinNormalizer() { } - + public SupervisedBinNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -12996,7 +12996,7 @@ public SupervisedBinNormalizer(params string[] inputColumns) } } } - + public SupervisedBinNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13007,7 +13007,7 @@ public SupervisedBinNormalizer(params ValueTuple[] inputOutputCo } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -13036,7 +13036,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformBinColumn[] Column { get; set; } + public NormalizeTransformBinColumn[] Column { get; set; } /// /// Max number of bins, power of 2 recommended @@ -13073,7 +13073,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13156,7 +13156,7 @@ public sealed partial class TermLoaderArguments /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Drop unknown terms instead of mapping them to NA term. @@ -13174,12 +13174,12 @@ public sealed partial class TextFeaturizer : Microsoft.ML.Runtime.EntryPoints.Co public TextFeaturizer() { } - + public TextFeaturizer(string outputColumn, params string[] inputColumns) { AddColumn(outputColumn, inputColumns); } - + public void AddColumn(string name, params string[] source) { Column = ManyToOneColumn.Create(name, source); @@ -13189,12 +13189,12 @@ public void AddColumn(string name, params string[] source) /// /// New column definition (optional form: name:srcs). /// - public Microsoft.ML.Transforms.TextTransformColumn Column { get; set; } + public TextTransformColumn Column { get; set; } /// /// Dataset language or 'AutoDetect' to detect language per row. 
/// - public Microsoft.ML.Transforms.TextTransformLanguage Language { get; set; } = Microsoft.ML.Transforms.TextTransformLanguage.English; + public TextTransformLanguage Language { get; set; } = TextTransformLanguage.English; /// /// Stopwords remover. @@ -13205,7 +13205,7 @@ public void AddColumn(string name, params string[] source) /// /// Casing text using the rules of the invariant culture. /// - public Microsoft.ML.Transforms.TextNormalizerTransformCaseNormalizationMode TextCase { get; set; } = Microsoft.ML.Transforms.TextNormalizerTransformCaseNormalizationMode.Lower; + public TextNormalizerTransformCaseNormalizationMode TextCase { get; set; } = TextNormalizerTransformCaseNormalizationMode.Lower; /// /// Whether to keep diacritical marks or remove them. @@ -13230,7 +13230,7 @@ public void AddColumn(string name, params string[] source) /// /// A dictionary of whitelisted terms. /// - public Microsoft.ML.Transforms.TermLoaderArguments Dictionary { get; set; } + public TermLoaderArguments Dictionary { get; set; } /// /// Ngram feature extractor to use for words (WordBag/WordHashBag). @@ -13247,7 +13247,7 @@ public void AddColumn(string name, params string[] source) /// /// Normalize vectors (rows) individually by rescaling them to unit norm. /// - public Microsoft.ML.Transforms.TextTransformTextNormKind VectorNormalizer { get; set; } = Microsoft.ML.Transforms.TextTransformTextNormKind.L2; + public TextTransformTextNormKind VectorNormalizer { get; set; } = TextTransformTextNormKind.L2; /// /// Input dataset @@ -13269,7 +13269,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13311,7 +13311,7 @@ public sealed partial class TextToKeyConverter : Microsoft.ML.Runtime.EntryPoint public TextToKeyConverter() { } - + public TextToKeyConverter(params string[] inputColumns) { if (inputColumns != null) @@ -13322,7 +13322,7 @@ public TextToKeyConverter(params string[] inputColumns) } } } - + public TextToKeyConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13333,7 +13333,7 @@ public TextToKeyConverter(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -13352,7 +13352,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.TermTransformColumn[] Column { get; set; } + public TermTransformColumn[] Column { get; set; } /// /// Maximum number of terms to keep per column when auto-training @@ -13367,7 +13367,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). 
/// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -13394,7 +13394,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13511,7 +13511,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13604,7 +13604,7 @@ public sealed partial class WordTokenizer : Microsoft.ML.Runtime.EntryPoints.Com public WordTokenizer() { } - + public WordTokenizer(params string[] inputColumns) { if (inputColumns != null) @@ -13615,7 +13615,7 @@ public WordTokenizer(params string[] inputColumns) } } } - + public WordTokenizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13626,7 +13626,7 @@ public WordTokenizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -13645,7 +13645,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) /// - public Microsoft.ML.Transforms.DelimitedTokenizeTransformColumn[] Column { get; set; } + public DelimitedTokenizeTransformColumn[] Column { get; set; } /// /// Comma separated set of term separator(s). Commonly: 'space', 'comma', 'semicolon' or other single character. @@ -13672,7 +13672,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13704,7 +13704,7 @@ public WordTokenizerPipelineStep(Output output) namespace Runtime { - public abstract class AutoMlEngine : ComponentKind {} + public abstract class AutoMlEngine : ComponentKind { } @@ -13756,7 +13756,7 @@ public sealed class UniformRandomAutoMlEngine : AutoMlEngine internal override string ComponentName => "UniformRandom"; } - public abstract class AutoMlStateBase : ComponentKind {} + public abstract class AutoMlStateBase : ComponentKind { } public enum AutoInferenceAutoMlMlStateArgumentsMetrics { @@ -13800,7 +13800,7 @@ public sealed class AutoMlStateAutoMlStateBase : AutoMlStateBase /// /// Supported metric for evaluator. /// - public Microsoft.ML.Runtime.AutoInferenceAutoMlMlStateArgumentsMetrics Metric { get; set; } = Microsoft.ML.Runtime.AutoInferenceAutoMlMlStateArgumentsMetrics.Auc; + public AutoInferenceAutoMlMlStateArgumentsMetrics Metric { get; set; } = AutoInferenceAutoMlMlStateArgumentsMetrics.Auc; /// /// AutoML engine (pipeline optimizer) that generates next candidates. 
@@ -13827,7 +13827,7 @@ public sealed class AutoMlStateAutoMlStateBase : AutoMlStateBase internal override string ComponentName => "AutoMlState"; } - public abstract class CalibratorTrainer : ComponentKind {} + public abstract class CalibratorTrainer : ComponentKind { } @@ -13879,7 +13879,7 @@ public sealed class PlattCalibratorCalibratorTrainer : CalibratorTrainer internal override string ComponentName => "PlattCalibrator"; } - public abstract class ClassificationLossFunction : ComponentKind {} + public abstract class ClassificationLossFunction : ComponentKind { } @@ -13936,7 +13936,7 @@ public sealed class SmoothedHingeLossClassificationLossFunction : Classification internal override string ComponentName => "SmoothedHingeLoss"; } - public abstract class EarlyStoppingCriterion : ComponentKind {} + public abstract class EarlyStoppingCriterion : ComponentKind { } @@ -14030,7 +14030,7 @@ public sealed class UPEarlyStoppingCriterion : EarlyStoppingCriterion internal override string ComponentName => "UP"; } - public abstract class FastTreeTrainer : ComponentKind {} + public abstract class FastTreeTrainer : ComponentKind { } @@ -14103,19 +14103,19 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -14272,19 +14272,19 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -14491,19 +14491,19 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + 
[TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -14660,19 +14660,19 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -14839,19 +14839,19 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -15008,19 +15008,19 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -15192,19 +15192,19 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - 
[TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] public double DropoutRate { get; set; } /// @@ -15361,19 +15361,19 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] public int NumTrees { get; set; } = 100; /// @@ -15474,7 +15474,7 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer internal override string ComponentName => "FastTreeTweedieRegression"; } - public abstract class NgramExtractor : ComponentKind {} + public abstract class NgramExtractor : ComponentKind { } @@ -15556,7 +15556,7 @@ public sealed class NGramHashNgramExtractor : NgramExtractor internal override string ComponentName => "NGramHash"; } - public abstract class ParallelTraining : ComponentKind {} + public abstract class ParallelTraining : ComponentKind { } @@ -15568,7 +15568,7 @@ public sealed class SingleParallelTraining : ParallelTraining internal override string ComponentName => "Single"; } - public abstract class PartitionedPathParser : ComponentKind {} + public abstract class PartitionedPathParser : ComponentKind { } @@ -15581,7 +15581,7 @@ public sealed class ParquetPathParserPartitionedPathParser : PartitionedPathPars } - public sealed class PartitionedFileLoaderColumn + public sealed partial class PartitionedFileLoaderColumn { /// /// Name of the column. @@ -15591,10 +15591,10 @@ public sealed class PartitionedFileLoaderColumn /// /// Data type of the column. /// - public Microsoft.ML.Transforms.DataKind? Type { get; set; } + public Microsoft.ML.Data.DataKind? Type { get; set; } /// - /// Source index of the column. + /// Index of the directory representing this column. /// public int Source { get; set; } @@ -15609,17 +15609,17 @@ public sealed class SimplePathParserPartitionedPathParser : PartitionedPathParse /// /// Column definitions used to override the Partitioned Path Parser. Expected with the format name:type:numeric-source, e.g. 
col=MyFeature:R4:1 /// - public Microsoft.ML.Runtime.PartitionedFileLoaderColumn[] Columns { get; set; } + public PartitionedFileLoaderColumn[] Columns { get; set; } /// /// Data type of each column. /// - public Microsoft.ML.Transforms.DataKind Type { get; set; } = Microsoft.ML.Transforms.DataKind.TX; + public Microsoft.ML.Data.DataKind Type { get; set; } = Microsoft.ML.Data.DataKind.TX; internal override string ComponentName => "SimplePathParser"; } - public abstract class RegressionLossFunction : ComponentKind {} + public abstract class RegressionLossFunction : ComponentKind { } @@ -15656,7 +15656,7 @@ public sealed class TweedieLossRegressionLossFunction : RegressionLossFunction internal override string ComponentName => "TweedieLoss"; } - public abstract class SDCAClassificationLossFunction : ComponentKind {} + public abstract class SDCAClassificationLossFunction : ComponentKind { } @@ -15698,7 +15698,7 @@ public sealed class SmoothedHingeLossSDCAClassificationLossFunction : SDCAClassi internal override string ComponentName => "SmoothedHingeLoss"; } - public abstract class SDCARegressionLossFunction : ComponentKind {} + public abstract class SDCARegressionLossFunction : ComponentKind { } @@ -15710,7 +15710,7 @@ public sealed class SquaredLossSDCARegressionLossFunction : SDCARegressionLossFu internal override string ComponentName => "SquaredLoss"; } - public abstract class SearchTerminator : ComponentKind {} + public abstract class SearchTerminator : ComponentKind { } @@ -15727,7 +15727,7 @@ public sealed class IterationLimitedSearchTerminator : SearchTerminator internal override string ComponentName => "IterationLimited"; } - public abstract class StopWordsRemover : ComponentKind {} + public abstract class StopWordsRemover : ComponentKind { } diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs index 234c87fade..83a72bf3d8 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -121,7 +121,7 @@ public static string GetInputType(ModuleCatalog catalog, Type inputType, case TlcModule.DataKind.FileHandle: return GetCSharpTypeName(inputType); case TlcModule.DataKind.Array: - return GetInputType(catalog, inputType.GetElementType(), typesSymbolTable) + "[]"; + return GetInputType(catalog, inputType.GetElementType(), typesSymbolTable, rootNameSpace) + "[]"; case TlcModule.DataKind.Component: string kind; bool success = catalog.TryGetComponentKind(type, out kind); @@ -136,13 +136,13 @@ public static string GetInputType(ModuleCatalog catalog, Type inputType, return $"{enumName}"; default: if (isNullable) - return rootNameSpace + typesSymbolTable[type.FullName]; + return GetEnumName(type, typesSymbolTable, rootNameSpace); ; if (isOptional) - return $"Optional<{rootNameSpace + typesSymbolTable[type.FullName]}>"; + return $"Optional<{GetEnumName(type, typesSymbolTable, rootNameSpace)}>"; if (typesSymbolTable.ContainsKey(type.FullName)) - return rootNameSpace + typesSymbolTable[type.FullName]; + return GetEnumName(type, typesSymbolTable, rootNameSpace); else - return GetSymbolFromType(typesSymbolTable, type, rootNameSpace); + return GetEnumName(type, typesSymbolTable, rootNameSpace); ; } } @@ -330,7 +330,7 @@ public static string GetValue(ModuleCatalog catalog, Type fieldType, object fiel var properties = propertyBag.Count > 0 ? 
$" {{ {string.Join(", ", propertyBag)} }}" : ""; return $"new {GetComponentName(componentInfo)}(){properties}"; case TlcModule.DataKind.Unknown: - return $"new {rootNameSpace + typesSymbolTable[fieldType.FullName]}()"; + return $"new {GetEnumName(fieldType, typesSymbolTable, rootNameSpace)}()"; default: return fieldValue.ToString(); } @@ -347,12 +347,13 @@ public static string GetComponentName(ModuleCatalog.ComponentInfo component) return $"{Capitalize(component.Name)}{component.Kind}"; } - public static string GetEnumName(Type type, Dictionary typesSymbolTable, string rootNamespace = "") + public static string GetEnumName(Type type, Dictionary typesSymbolTable, string rootNamespace) { - if (typesSymbolTable.ContainsKey(type.FullName)) - return rootNamespace + typesSymbolTable[type.FullName]; - else - return GetSymbolFromType(typesSymbolTable, type, rootNamespace); + if (!typesSymbolTable.TryGetValue(type.FullName, out string fullname)) + fullname = GetSymbolFromType(typesSymbolTable, type, rootNamespace); + if (fullname.StartsWith(rootNamespace)) + return fullname.Substring(rootNamespace.Length + 1); + else return fullname; } public static string GetJsonFromField(string fieldName, Type fieldType) @@ -408,7 +409,7 @@ public static string GetJsonFromField(string fieldName, Type fieldType) private readonly string _regenerate; private readonly HashSet _excludedSet; private const string RegistrationName = "CSharpApiGenerator"; - public Dictionary TypesSymbolTable = new Dictionary(); + private Dictionary _typesSymbolTable = new Dictionary(); public CSharpApiGenerator(IHostEnvironment env, Arguments args, string regenerate) { @@ -456,12 +457,12 @@ public void Generate(IEnumerable infos) writer.WriteLine("{"); writer.Indent(); - foreach (var kind in catalog.GetAllComponentKinds().OrderBy(x => x)) + foreach (var kind in catalog.GetAllComponentKinds()) { // Generate kind base class GenerateComponentKind(writer, kind); - foreach (var component in catalog.GetAllComponents(kind).OrderBy(x => x.Name)) + foreach (var component in catalog.GetAllComponents(kind)) { // Generate component GenerateComponent(writer, component, catalog); @@ -594,8 +595,8 @@ private static string GetSymbolFromType(Dictionary typesSymbolTa name += nestedNames[i]; Contracts.Assert(typesSymbolTable.Select(kvp => kvp.Value).All(str => string.Compare(str, name) != 0)); - - return "Microsoft.ML." 
+ name; + typesSymbolTable[type.FullName] = name; + return name; } private void GenerateEnums(IndentingTextWriter writer, Type inputType, string currentNamespace) @@ -612,7 +613,7 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) type = type.GetGenericArguments()[0]; - if (TypesSymbolTable.ContainsKey(type.FullName)) + if (_typesSymbolTable.ContainsKey(type.FullName)) continue; if (!type.IsEnum) @@ -625,13 +626,13 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu var enumType = Enum.GetUnderlyingType(type); - TypesSymbolTable[type.FullName] = GetSymbolFromType(TypesSymbolTable, type, currentNamespace); + var symbolName = GetSymbolFromType(_typesSymbolTable, type, currentNamespace); if (enumType == typeof(int)) - writer.WriteLine($"public enum {TypesSymbolTable[type.FullName].Substring(TypesSymbolTable[type.FullName].LastIndexOf('.') + 1)}"); + writer.WriteLine($"public enum {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}"); else { Contracts.Assert(enumType == typeof(byte)); - writer.WriteLine($"public enum {TypesSymbolTable[type.FullName].Substring(TypesSymbolTable[type.FullName].LastIndexOf('.') + 1)} : byte"); + writer.WriteLine($"public enum {symbolName.Substring(symbolName.LastIndexOf('.') + 1)} : byte"); } writer.Write("{"); @@ -707,23 +708,23 @@ private void GenerateStructs(IndentingTextWriter writer, if (typeEnum != TlcModule.DataKind.Unknown) continue; - if (TypesSymbolTable.ContainsKey(type.FullName)) + if (_typesSymbolTable.ContainsKey(type.FullName)) continue; - - TypesSymbolTable[type.FullName] = GetSymbolFromType(TypesSymbolTable, type, currentNamespace); + GenerateEnums(writer, type,currentNamespace); + GenerateStructs(writer, type, catalog, currentNamespace); + var symbolName = GetSymbolFromType(_typesSymbolTable, type, currentNamespace); string classBase = ""; if (type.IsSubclassOf(typeof(OneToOneColumn))) - classBase = $" : OneToOneColumn<{TypesSymbolTable[type.FullName].Substring(TypesSymbolTable[type.FullName].LastIndexOf('.') + 1)}>, IOneToOneColumn"; + classBase = $" : OneToOneColumn<{symbolName.Substring(symbolName.LastIndexOf('.') + 1)}>, IOneToOneColumn"; else if (type.IsSubclassOf(typeof(ManyToOneColumn))) - classBase = $" : ManyToOneColumn<{TypesSymbolTable[type.FullName].Substring(TypesSymbolTable[type.FullName].LastIndexOf('.') + 1)}>, IManyToOneColumn"; - writer.WriteLine($"public sealed partial class {TypesSymbolTable[type.FullName].Substring(TypesSymbolTable[type.FullName].LastIndexOf('.') + 1)}{classBase}"); + classBase = $" : ManyToOneColumn<{symbolName.Substring(symbolName.LastIndexOf('.') + 1)}>, IManyToOneColumn"; + writer.WriteLine($"public sealed partial class {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}{classBase}"); writer.WriteLine("{"); writer.Indent(); - GenerateInputFields(writer, type, catalog, TypesSymbolTable); + GenerateInputFields(writer, type, catalog, _typesSymbolTable, currentNamespace); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); - GenerateStructs(writer, type, catalog, currentNamespace); } } @@ -858,12 +859,12 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, writer.Indent(); if (isArray) { - writer.WriteLine($"var list = {fieldName} == null ? 
new List<{TypesSymbolTable[type.FullName]}>() : new List<{TypesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(OneToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(source));"); + writer.WriteLine($"var list = {fieldName} == null ? new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); + writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(source));"); writer.WriteLine($"{fieldName} = list.ToArray();"); } else - writer.WriteLine($"{fieldName} = OneToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(source);"); + writer.WriteLine($"{fieldName} = OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(source);"); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); @@ -872,12 +873,12 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, writer.Indent(); if (isArray) { - writer.WriteLine($"var list = {fieldName} == null ? new List<{TypesSymbolTable[type.FullName]}>() : new List<{TypesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(OneToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(name, source));"); + writer.WriteLine($"var list = {fieldName} == null ? new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); + writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); writer.WriteLine($"{fieldName} = list.ToArray();"); } else - writer.WriteLine($"{fieldName} = OneToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(name, source);"); + writer.WriteLine($"{fieldName} = OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source);"); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); @@ -905,12 +906,12 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, writer.Indent(); if (isArray) { - writer.WriteLine($"var list = {fieldName} == null ? new List<{TypesSymbolTable[type.FullName]}>() : new List<{TypesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(ManyToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(name, source));"); + writer.WriteLine($"var list = {fieldName} == null ? new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); + writer.WriteLine($"list.Add(ManyToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); writer.WriteLine($"{fieldName} = list.ToArray();"); } else - writer.WriteLine($"{fieldName} = ManyToOneColumn<{TypesSymbolTable[type.FullName]}>.Create(name, source);"); + writer.WriteLine($"{fieldName} = ManyToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source);"); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); @@ -935,9 +936,9 @@ private void GenerateInput(IndentingTextWriter writer, classBase += ", Microsoft.ML.ILearningPipelineItem"; } - GenerateEnums(writer, entryPointInfo.InputType, classAndMethod.Item1); + GenerateEnums(writer, entryPointInfo.InputType, "Microsoft.ML." + classAndMethod.Item1); writer.WriteLine(); - GenerateStructs(writer, entryPointInfo.InputType, catalog, classAndMethod.Item1); + GenerateStructs(writer, entryPointInfo.InputType, catalog, "Microsoft.ML." 
+ classAndMethod.Item1); writer.WriteLine("/// "); foreach (var line in entryPointInfo.Description.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)) writer.WriteLine($"/// {line}"); @@ -955,7 +956,7 @@ private void GenerateInput(IndentingTextWriter writer, GenerateColumnAddMethods(writer, entryPointInfo.InputType, catalog, classAndMethod.Item2, out Type transformType); writer.WriteLine(); - GenerateInputFields(writer, entryPointInfo.InputType, catalog, TypesSymbolTable); + GenerateInputFields(writer, entryPointInfo.InputType, catalog, _typesSymbolTable, "Microsoft.ML." + classAndMethod.Item1); writer.WriteLine(); GenerateOutput(writer, entryPointInfo, out HashSet outputVariableNames); @@ -1057,8 +1058,8 @@ private static void GenerateApplyFunction(IndentingTextWriter writer, ModuleCata writer.WriteLine("}"); } - private static void GenerateInputFields(IndentingTextWriter writer, - Type inputType, ModuleCatalog catalog, Dictionary typesSymbolTable, string rootNameSpace = "") + private void GenerateInputFields(IndentingTextWriter writer, + Type inputType, ModuleCatalog catalog, Dictionary typesSymbolTable, string rootNameSpace) { var defaults = Activator.CreateInstance(inputType); foreach (var fieldInfo in inputType.GetFields()) @@ -1191,7 +1192,7 @@ private void GenerateComponent(IndentingTextWriter writer, ModuleCatalog.Compone writer.WriteLine($"public sealed class {GeneratorUtils.GetComponentName(component)} : {component.Kind}"); writer.WriteLine("{"); writer.Indent(); - GenerateInputFields(writer, component.ArgumentType, catalog, TypesSymbolTable, "Microsoft.ML."); + GenerateInputFields(writer, component.ArgumentType, catalog, _typesSymbolTable, "Runtime"); writer.WriteLine($"internal override string ComponentName => \"{component.Name}\";"); writer.Outdent(); writer.WriteLine("}"); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs index b42dee2d52..aa4abe1752 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs @@ -303,14 +303,14 @@ public void TestCrossValidationMacro() { Name = "Label", Source = new [] { new TextLoaderRange(11) }, - Type = DataKind.Num + Type = ML.Data.DataKind.Num }, new TextLoaderColumn() { Name = "Features", Source = new [] { new TextLoaderRange(0,10) }, - Type = DataKind.Num + Type = ML.Data.DataKind.Num } } } @@ -666,7 +666,7 @@ public void TestCrossValidationMacroWithNonDefaultNames() importInput.Arguments.Column = new TextLoaderColumn[] { new TextLoaderColumn { Name = "Label", Source = new[] { new TextLoaderRange(0) } }, - new TextLoaderColumn { Name = "Workclass", Source = new[] { new TextLoaderRange(1) }, Type = DataKind.Text }, + new TextLoaderColumn { Name = "Workclass", Source = new[] { new TextLoaderRange(1) }, Type = ML.Data.DataKind.Text }, new TextLoaderColumn { Name = "Features", Source = new[] { new TextLoaderRange(9, 14) } } }; var importOutput = experiment.Add(importInput); diff --git a/test/Microsoft.ML.TestFramework/ModelHelper.cs b/test/Microsoft.ML.TestFramework/ModelHelper.cs index edf4408bcb..42c684e51a 100644 --- a/test/Microsoft.ML.TestFramework/ModelHelper.cs +++ b/test/Microsoft.ML.TestFramework/ModelHelper.cs @@ -70,147 +70,147 @@ private static ITransformModel CreateKcHousePricePredictorModel(string dataPath) { Name = "Id", Source = new [] { new TextLoaderRange(0) }, - Type = Runtime.Data.DataKind.Text + Type = Data.DataKind.Text }, new TextLoaderColumn() { 
Name = "Date", Source = new [] { new TextLoaderRange(1) }, - Type = Runtime.Data.DataKind.Text + Type = Data.DataKind.Text }, new TextLoaderColumn() { Name = "Label", Source = new [] { new TextLoaderRange(2) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Bedrooms", Source = new [] { new TextLoaderRange(3) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Bathrooms", Source = new [] { new TextLoaderRange(4) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftLiving", Source = new [] { new TextLoaderRange(5) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftLot", Source = new [] { new TextLoaderRange(6) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Floors", Source = new [] { new TextLoaderRange(7) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Waterfront", Source = new [] { new TextLoaderRange(8) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "View", Source = new [] { new TextLoaderRange(9) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Condition", Source = new [] { new TextLoaderRange(10) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Grade", Source = new [] { new TextLoaderRange(11) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftAbove", Source = new [] { new TextLoaderRange(12) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftBasement", Source = new [] { new TextLoaderRange(13) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "YearBuilt", Source = new [] { new TextLoaderRange(14) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "YearRenovated", Source = new [] { new TextLoaderRange(15) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Zipcode", Source = new [] { new TextLoaderRange(16) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Lat", Source = new [] { new TextLoaderRange(17) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Long", Source = new [] { new TextLoaderRange(18) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftLiving15", Source = new [] { new TextLoaderRange(19) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SqftLot15", Source = new [] { new TextLoaderRange(20) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, } } diff --git a/test/Microsoft.ML.Tests/OnnxTests.cs b/test/Microsoft.ML.Tests/OnnxTests.cs index 6910aba70b..477a9c6fa6 100644 --- a/test/Microsoft.ML.Tests/OnnxTests.cs +++ b/test/Microsoft.ML.Tests/OnnxTests.cs @@ -52,14 +52,14 @@ public void BinaryClassificationSaveModelToOnnxTest() { Name = "Label", Source = new [] { new TextLoaderRange(0) }, - Type = DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "Features", Source = new [] { new TextLoaderRange(1, 9) }, - Type = 
DataKind.Num + Type = Data.DataKind.Num } } } diff --git a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs index 7a31f17d96..1ebc2489ec 100644 --- a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs @@ -211,14 +211,14 @@ private LearningPipeline PreparePipeline() { Name = "Label", Source = new [] { new TextLoaderRange(0) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SentimentText", Source = new [] { new TextLoaderRange(1) }, - Type = Runtime.Data.DataKind.Text + Type = Data.DataKind.Text } } } @@ -265,14 +265,14 @@ private Data.TextLoader PrepareTextLoaderTestData() { Name = "Label", Source = new [] { new TextLoaderRange(0) }, - Type = Runtime.Data.DataKind.Num + Type = Data.DataKind.Num }, new TextLoaderColumn() { Name = "SentimentText", Source = new [] { new TextLoaderRange(1) }, - Type = Runtime.Data.DataKind.Text + Type = Data.DataKind.Text } } } From bc60787cb98db0f5f96955aebc51620343431ce7 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Fri, 8 Jun 2018 17:12:09 -0700 Subject: [PATCH 02/11] some refactoring, to be continue... --- src/Microsoft.ML/CSharpApi.cs | 609 +++++++------- .../Internal/Tools/CSharpApiGenerator.cs | 774 ++++-------------- .../Runtime/Internal/Tools/GeneratorUtils.cs | 467 +++++++++++ test/Microsoft.ML.Tests/CSharpCodeGen.cs | 2 +- 4 files changed, 918 insertions(+), 934 deletions(-) create mode 100644 src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs index f1dd8a051c..3cdbd527f9 100644 --- a/src/Microsoft.ML/CSharpApi.cs +++ b/src/Microsoft.ML/CSharpApi.cs @@ -1644,25 +1644,25 @@ public TextLoader(string filePath) { _inputFilePath = filePath; } - + public void SetInput(IHostEnvironment env, Experiment experiment) { IFileHandle inputFile = new SimpleFileHandle(env, _inputFilePath, false, false); experiment.SetInput(InputFile, inputFile); } - + public Var GetInputData() => null; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { Contracts.Assert(previousStep == null); - + return new TextLoaderPipelineStep(experiment.Add(this)); } - + private class TextLoaderPipelineStep : ILearningPipelineDataStep { - public TextLoaderPipelineStep(Output output) + public TextLoaderPipelineStep (Output output) { Data = output.Data; Model = null; @@ -2444,7 +2444,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2519,7 +2519,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2656,7 +2656,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2774,7 +2774,7 @@ public sealed class Output } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2919,7 +2919,7 @@ public 
sealed class Output } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -2983,7 +2983,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3105,7 +3105,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3680,13 +3680,13 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[] { 0.01f, 0.1f, 0.5f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[]{0.01f, 0.1f, 0.5f, 1f})] public float LearningRate { get; set; } = 1f; /// /// Decrease learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[]{false, true})] public bool DecreaseLearningRate { get; set; } = false; /// @@ -3728,7 +3728,7 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] public int NumIterations { get; set; } = 1; /// @@ -3739,13 +3739,13 @@ public sealed partial class AveragedPerceptronBinaryClassifier : Microsoft.ML.Ru /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -3788,7 +3788,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -3968,19 +3968,19 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new 
object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -4088,7 +4088,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4250,19 +4250,19 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -4370,7 +4370,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4479,19 +4479,19 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -4648,19 +4648,19 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + 
[TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -4768,7 +4768,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -4905,19 +4905,19 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -5074,19 +5074,19 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -5194,7 +5194,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IRan } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -5291,19 +5291,19 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -5460,19 +5460,19 @@ 
public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -5580,7 +5580,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -5682,19 +5682,19 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -5851,19 +5851,19 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -5971,7 +5971,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6038,7 +6038,7 @@ public sealed partial class 
GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Total number of iterations over all features /// - [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[] { 200, 1500, 9500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[]{200, 1500, 9500})] public int NumIterations { get; set; } = 9500; /// @@ -6049,7 +6049,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale:true)] public double LearningRates { get; set; } = 0.002d; /// @@ -6080,7 +6080,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Minimum number of training instances required to form a partition /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[]{1, 10, 50})] public int MinDocuments { get; set; } = 10; /// @@ -6128,7 +6128,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6179,7 +6179,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Total number of iterations over all features /// - [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[] { 200, 1500, 9500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumIterations", new object[]{200, 1500, 9500})] public int NumIterations { get; set; } = 9500; /// @@ -6190,7 +6190,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.001f, 0.1f, isLogScale:true)] public double LearningRates { get; set; } = 0.002d; /// @@ -6221,7 +6221,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Minimum number of training instances required to form a partition /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocuments", new object[]{1, 10, 50})] public int MinDocuments { get; set; } = 10; /// @@ -6269,7 +6269,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6317,7 +6317,7 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// The number of clusters /// - [TlcModule.SweepableDiscreteParamAttribute("K", new object[] { 5, 10, 20, 40 })] + [TlcModule.SweepableDiscreteParamAttribute("K", new object[]{5, 10, 20, 40})] public int K { get; set; } = 5; /// @@ -6380,7 +6380,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IClu } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6421,7 +6421,7 @@ public sealed partial class 
LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Regularizer constant /// - [TlcModule.SweepableFloatParamAttribute("Lambda", 1E-05f, 0.1f, stepSize: 10, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Lambda", 1E-05f, 0.1f, stepSize:10, isLogScale:true)] public float Lambda { get; set; } = 0.001f; /// @@ -6432,13 +6432,13 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Perform projection to unit-ball? Typically used with batch size > 1. /// - [TlcModule.SweepableDiscreteParamAttribute("PerformProjection", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("PerformProjection", new object[]{false, true})] public bool PerformProjection { get; set; } = false; /// /// No bias /// - [TlcModule.SweepableDiscreteParamAttribute("NoBias", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("NoBias", new object[]{false, true})] public bool NoBias { get; set; } = false; /// @@ -6455,7 +6455,7 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] public int NumIterations { get; set; } = 1; /// @@ -6466,13 +6466,13 @@ public sealed partial class LinearSvmBinaryClassifier : Microsoft.ML.Runtime.Ent /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -6515,7 +6515,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6561,25 +6561,25 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. 
Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] public int MemorySize { get; set; } = 20; /// @@ -6601,7 +6601,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// @@ -6617,7 +6617,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] public bool DenseOptimizer { get; set; } = false; /// @@ -6665,7 +6665,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6711,25 +6711,25 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] public int MemorySize { get; set; } = 20; /// @@ -6751,7 +6751,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// @@ -6767,7 +6767,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. 
/// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] public bool DenseOptimizer { get; set; } = false; /// @@ -6815,7 +6815,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6888,7 +6888,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -6935,13 +6935,13 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[] { 0.01f, 0.1f, 0.5f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("LearningRate", new object[]{0.01f, 0.1f, 0.5f, 1f})] public float LearningRate { get; set; } = 0.1f; /// /// Decrease learning rate /// - [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("DecreaseLearningRate", new object[]{false, true})] public bool DecreaseLearningRate { get; set; } = true; /// @@ -6983,7 +6983,7 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Number of iterations /// - [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize: 10, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumIterations", 1, 100, stepSize:10, isLogScale:true)] public int NumIterations { get; set; } = 1; /// @@ -6994,13 +6994,13 @@ public sealed partial class OnlineGradientDescentRegressor : Microsoft.ML.Runtim /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// /// Whether to shuffle for each training iteration /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -7043,7 +7043,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7084,7 +7084,7 @@ public sealed partial class OrdinaryLeastSquaresRegressor : Microsoft.ML.Runtime /// /// L2 regularization weight /// - [TlcModule.SweepableDiscreteParamAttribute("L2Weight", new object[] { 1E-06f, 0.1f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("L2Weight", new object[]{1E-06f, 0.1f, 1f})] public float L2Weight { get; set; } = 1E-06f; /// @@ -7132,7 +7132,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7173,19 +7173,19 @@ public sealed partial class PcaAnomalyDetector : 
Microsoft.ML.Runtime.EntryPoint /// /// The number of components in the PCA /// - [TlcModule.SweepableDiscreteParamAttribute("Rank", new object[] { 10, 20, 40, 80 })] + [TlcModule.SweepableDiscreteParamAttribute("Rank", new object[]{10, 20, 40, 80})] public int Rank { get; set; } = 20; /// /// Oversampling parameter for randomized PCA training /// - [TlcModule.SweepableDiscreteParamAttribute("Oversampling", new object[] { 10, 20, 40 })] + [TlcModule.SweepableDiscreteParamAttribute("Oversampling", new object[]{10, 20, 40})] public int Oversampling { get; set; } = 20; /// /// If enabled, data is centered to be zero mean /// - [TlcModule.SweepableDiscreteParamAttribute("Center", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Center", new object[]{false, true})] public bool Center { get; set; } = true; /// @@ -7228,7 +7228,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IAno } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7269,25 +7269,25 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. /// /// L2 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L2Weight", 0f, 1f, numSteps:4)] public float L2Weight { get; set; } = 1f; /// /// L1 regularization weight /// - [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps: 4)] + [TlcModule.SweepableFloatParamAttribute("L1Weight", 0f, 1f, numSteps:4)] public float L1Weight { get; set; } = 1f; /// /// Tolerance parameter for optimization convergence. Lower = slower, more accurate /// - [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[] { 0.0001f, 1E-07f })] + [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] public float OptTol { get; set; } = 1E-07f; /// /// Memory size for L-BFGS. Lower=faster, less accurate /// - [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] public int MemorySize { get; set; } = 20; /// @@ -7309,7 +7309,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. /// /// Init weights diameter /// - [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps: 5)] + [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0f, 1f, numSteps:5)] public float InitWtsDiameter { get; set; } /// @@ -7325,7 +7325,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. /// /// Force densification of the internal optimization vectors /// - [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[]{false, true})] public bool DenseOptimizer { get; set; } = false; /// @@ -7373,7 +7373,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7436,13 +7436,13 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. 
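// --------------------------------------------------------------------------------------
// Editor's illustration, not part of this patch: a minimal sketch of configuring one of
// the generated entry-point classes above. The Sweepable*ParamAttribute annotations only
// describe the ranges a hyperparameter sweeper may explore; assigning a property pins a
// concrete value. The LearningPipeline usage, the Microsoft.ML.Trainers namespace, and
// the SentimentData/SentimentPrediction row classes are assumptions made for this sketch
// and are not defined in this patch.
// --------------------------------------------------------------------------------------
using Microsoft.ML;            // LearningPipeline (assumed available in this package)
using Microsoft.ML.Trainers;   // generated trainer classes from this file (assumed namespace)

public static class SdcaConfigurationSketch
{
    public static void TrainSketch()
    {
        // Pin a few of the sweepable hyperparameters shown above to concrete values.
        var trainer = new StochasticDualCoordinateAscentBinaryClassifier
        {
            L2Const = 1E-06f,            // overrides the automatically inferred constant
            ConvergenceTolerance = 0.1f, // matches the generated default shown above
            Shuffle = true
        };

        var pipeline = new LearningPipeline();
        pipeline.Add(trainer);           // generated classes plug into the pipeline via ApplyStep

        // SentimentData / SentimentPrediction are hypothetical row types for illustration only.
        var model = pipeline.Train<SentimentData, SentimentPrediction>();
        // 'model' could then be used for prediction or evaluation (omitted here).
    }
}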
/// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] public float? L1Threshold { get; set; } /// @@ -7453,19 +7453,19 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] public float ConvergenceTolerance { get; set; } = 0.1f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -7476,7 +7476,7 @@ public sealed partial class StochasticDualCoordinateAscentBinaryClassifier : Mic /// /// The learning rate for adjusting bias from being regularized. /// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] public float BiasLearningRate { get; set; } /// @@ -7514,7 +7514,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7561,13 +7561,13 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] public float? 
L1Threshold { get; set; } /// @@ -7578,19 +7578,19 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] public float ConvergenceTolerance { get; set; } = 0.1f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -7601,7 +7601,7 @@ public sealed partial class StochasticDualCoordinateAscentClassifier : Microsoft /// /// The learning rate for adjusting bias from being regularized. /// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] public float BiasLearningRate { get; set; } /// @@ -7639,7 +7639,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7686,13 +7686,13 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// L2 regularizer constant. By default the l2 constant is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { "", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f })] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{"", 1E-07f, 1E-06f, 1E-05f, 0.0001f, 0.001f, 0.01f})] public float? L2Const { get; set; } /// /// L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set. /// - [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[] { "", 0f, 0.25f, 0.5f, 0.75f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("L1Threshold", new object[]{"", 0f, 0.25f, 0.5f, 0.75f, 1f})] public float? L1Threshold { get; set; } /// @@ -7703,19 +7703,19 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// The tolerance for the ratio between duality gap and primal loss for convergence checking. /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.001f, 0.01f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.001f, 0.01f, 0.1f, 0.2f})] public float ConvergenceTolerance { get; set; } = 0.01f; /// /// Maximum number of iterations; set to 1 to simulate online learning. Defaults to automatic. 
/// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { "", 10, 20, 100 })] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{"", 10, 20, 100})] public int? MaxIterations { get; set; } /// /// Shuffle data every epoch? /// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -7726,7 +7726,7 @@ public sealed partial class StochasticDualCoordinateAscentRegressor : Microsoft. /// /// The learning rate for adjusting bias from being regularized. /// - [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[] { 0f, 0.01f, 0.1f, 1f })] + [TlcModule.SweepableDiscreteParamAttribute("BiasLearningRate", new object[]{0f, 0.01f, 0.1f, 1f})] public float BiasLearningRate { get; set; } = 1f; /// @@ -7764,7 +7764,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7811,7 +7811,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// L2 regularizer constant /// - [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[] { 1E-07f, 5E-07f, 1E-06f, 5E-06f, 1E-05f })] + [TlcModule.SweepableDiscreteParamAttribute("L2Const", new object[]{1E-07f, 5E-07f, 1E-06f, 5E-06f, 1E-05f})] public float L2Const { get; set; } = 1E-06f; /// @@ -7822,13 +7822,13 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Exponential moving averaged improvement tolerance for convergence /// - [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[] { 0.01f, 0.001f, 0.0001f, 1E-05f })] + [TlcModule.SweepableDiscreteParamAttribute("ConvergenceTolerance", new object[]{0.01f, 0.001f, 0.0001f, 1E-05f})] public double ConvergenceTolerance { get; set; } = 0.0001d; /// /// Maximum number of iterations; set to 1 to simulate online learning. /// - [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[] { 1, 5, 10, 20 })] + [TlcModule.SweepableDiscreteParamAttribute("MaxIterations", new object[]{1, 5, 10, 20})] public int MaxIterations { get; set; } = 20; /// @@ -7839,7 +7839,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Shuffle data every epoch? 
/// - [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] + [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[]{false, true})] public bool Shuffle { get; set; } = true; /// @@ -7903,7 +7903,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin } public Var GetInputData() => TrainingData; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -7981,7 +7981,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8046,7 +8046,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8117,7 +8117,7 @@ public sealed partial class BinNormalizer : Microsoft.ML.Runtime.EntryPoints.Com public BinNormalizer() { } - + public BinNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -8128,7 +8128,7 @@ public BinNormalizer(params string[] inputColumns) } } } - + public BinNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8139,7 +8139,7 @@ public BinNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8195,7 +8195,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8284,7 +8284,7 @@ public sealed partial class CategoricalHashOneHotVectorizer : Microsoft.ML.Runti public CategoricalHashOneHotVectorizer() { } - + public CategoricalHashOneHotVectorizer(params string[] inputColumns) { if (inputColumns != null) @@ -8295,7 +8295,7 @@ public CategoricalHashOneHotVectorizer(params string[] inputColumns) } } } - + public CategoricalHashOneHotVectorizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8306,7 +8306,7 @@ public CategoricalHashOneHotVectorizer(params ValueTuple[] input } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8372,7 +8372,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8459,7 +8459,7 @@ public sealed partial class CategoricalOneHotVectorizer : Microsoft.ML.Runtime.E public CategoricalOneHotVectorizer() { } - + public CategoricalOneHotVectorizer(params string[] inputColumns) { if (inputColumns != null) @@ -8470,7 +8470,7 @@ public CategoricalOneHotVectorizer(params string[] inputColumns) } } } - + public CategoricalOneHotVectorizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8481,7 +8481,7 @@ public CategoricalOneHotVectorizer(params ValueTuple[] inputOutp } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -8547,7 +8547,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8603,7 +8603,7 @@ public sealed partial class CharacterTokenizer : Microsoft.ML.Runtime.EntryPoint public CharacterTokenizer() { } - + public CharacterTokenizer(params string[] inputColumns) { if (inputColumns != null) @@ -8614,7 +8614,7 @@ public CharacterTokenizer(params string[] inputColumns) } } } - + public CharacterTokenizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8625,7 +8625,7 @@ public CharacterTokenizer(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -8671,7 +8671,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8727,12 +8727,12 @@ public sealed partial class ColumnConcatenator : Microsoft.ML.Runtime.EntryPoint public ColumnConcatenator() { } - + public ColumnConcatenator(string outputColumn, params string[] inputColumns) { AddColumn(outputColumn, inputColumns); } - + public void AddColumn(string name, params string[] source) { var list = Column == null ? new List() : new List(Column); @@ -8766,7 +8766,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8822,7 +8822,7 @@ public sealed partial class ColumnCopier : Microsoft.ML.Runtime.EntryPoints.Comm public ColumnCopier() { } - + public ColumnCopier(params string[] inputColumns) { if (inputColumns != null) @@ -8833,7 +8833,7 @@ public ColumnCopier(params string[] inputColumns) } } } - + public ColumnCopier(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -8844,7 +8844,7 @@ public ColumnCopier(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -8885,7 +8885,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -8950,7 +8950,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9015,7 +9015,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9081,7 +9081,7 @@ public sealed partial class ColumnTypeConverter : Microsoft.ML.Runtime.EntryPoin public ColumnTypeConverter() { } - + public ColumnTypeConverter(params string[] inputColumns) { if (inputColumns != null) @@ -9092,7 +9092,7 @@ public ColumnTypeConverter(params string[] inputColumns) } } } - + public ColumnTypeConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9103,7 +9103,7 @@ public ColumnTypeConverter(params ValueTuple[] inputOutputColumn } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -9154,7 +9154,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9224,7 +9224,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9290,7 +9290,7 @@ public sealed partial class ConditionalNormalizer : Microsoft.ML.Runtime.EntryPo public ConditionalNormalizer() { } - + public ConditionalNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -9301,7 +9301,7 @@ public ConditionalNormalizer(params string[] inputColumns) } } } - + public ConditionalNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9312,7 +9312,7 @@ public ConditionalNormalizer(params ValueTuple[] inputOutputColu } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -9363,7 +9363,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9429,7 +9429,7 @@ public sealed class Output } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9583,7 +9583,7 @@ public sealed partial class Dictionarizer : Microsoft.ML.Runtime.EntryPoints.Com public Dictionarizer() { } - + public Dictionarizer(params string[] inputColumns) { if (inputColumns != null) @@ -9594,7 +9594,7 @@ public Dictionarizer(params string[] inputColumns) } } } - + public Dictionarizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9605,7 +9605,7 @@ public Dictionarizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -9666,7 +9666,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9731,7 +9731,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9801,7 +9801,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9881,7 +9881,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -9952,7 +9952,7 @@ public sealed partial class GlobalContrastNormalizer : Microsoft.ML.Runtime.Entr public GlobalContrastNormalizer() { } - + public GlobalContrastNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -9963,7 +9963,7 @@ public GlobalContrastNormalizer(params string[] inputColumns) } } } - + public GlobalContrastNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -9974,7 +9974,7 @@ public GlobalContrastNormalizer(params ValueTuple[] inputOutputC } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -10030,7 +10030,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10111,7 +10111,7 @@ public sealed partial class HashConverter : Microsoft.ML.Runtime.EntryPoints.Com public HashConverter() { } - + public HashConverter(params string[] inputColumns) { if (inputColumns != null) @@ -10122,7 +10122,7 @@ public HashConverter(params string[] inputColumns) } } } - + public HashConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10133,7 +10133,7 @@ public HashConverter(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10194,7 +10194,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10250,7 +10250,7 @@ public sealed partial class KeyToTextConverter : Microsoft.ML.Runtime.EntryPoint public KeyToTextConverter() { } - + public KeyToTextConverter(params string[] inputColumns) { if (inputColumns != null) @@ -10261,7 +10261,7 @@ public KeyToTextConverter(params string[] inputColumns) } } } - + public KeyToTextConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10272,7 +10272,7 @@ public KeyToTextConverter(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10313,7 +10313,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10383,7 +10383,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10444,7 +10444,7 @@ public sealed partial class LabelIndicator : Microsoft.ML.Runtime.EntryPoints.Co public LabelIndicator() { } - + public LabelIndicator(params string[] inputColumns) { if (inputColumns != null) @@ -10455,7 +10455,7 @@ public LabelIndicator(params string[] inputColumns) } } } - + public LabelIndicator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10466,7 +10466,7 @@ public LabelIndicator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -10512,7 +10512,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10577,7 +10577,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10638,7 +10638,7 @@ public sealed partial class LogMeanVarianceNormalizer : Microsoft.ML.Runtime.Ent public LogMeanVarianceNormalizer() { } - + public LogMeanVarianceNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10649,7 +10649,7 @@ public LogMeanVarianceNormalizer(params string[] inputColumns) } } } - + public LogMeanVarianceNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10660,7 +10660,7 @@ public LogMeanVarianceNormalizer(params ValueTuple[] inputOutput } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10711,7 +10711,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10785,7 +10785,7 @@ public sealed partial class LpNormalizer : Microsoft.ML.Runtime.EntryPoints.Comm public LpNormalizer() { } - + public LpNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10796,7 +10796,7 @@ public LpNormalizer(params string[] inputColumns) } } } - + public LpNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10807,7 +10807,7 @@ public LpNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -10858,7 +10858,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -10932,7 +10932,7 @@ public sealed partial class MeanVarianceNormalizer : Microsoft.ML.Runtime.EntryP public MeanVarianceNormalizer() { } - + public MeanVarianceNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -10943,7 +10943,7 @@ public MeanVarianceNormalizer(params string[] inputColumns) } } } - + public MeanVarianceNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -10954,7 +10954,7 @@ public MeanVarianceNormalizer(params ValueTuple[] inputOutputCol } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11010,7 +11010,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11052,7 +11052,7 @@ public sealed partial class MinMaxNormalizer : Microsoft.ML.Runtime.EntryPoints. 
public MinMaxNormalizer() { } - + public MinMaxNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -11063,7 +11063,7 @@ public MinMaxNormalizer(params string[] inputColumns) } } } - + public MinMaxNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11074,7 +11074,7 @@ public MinMaxNormalizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11125,7 +11125,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11208,7 +11208,7 @@ public sealed partial class MissingValueHandler : Microsoft.ML.Runtime.EntryPoin public MissingValueHandler() { } - + public MissingValueHandler(params string[] inputColumns) { if (inputColumns != null) @@ -11219,7 +11219,7 @@ public MissingValueHandler(params string[] inputColumns) } } } - + public MissingValueHandler(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11230,7 +11230,7 @@ public MissingValueHandler(params ValueTuple[] inputOutputColumn } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11286,7 +11286,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11342,7 +11342,7 @@ public sealed partial class MissingValueIndicator : Microsoft.ML.Runtime.EntryPo public MissingValueIndicator() { } - + public MissingValueIndicator(params string[] inputColumns) { if (inputColumns != null) @@ -11353,7 +11353,7 @@ public MissingValueIndicator(params string[] inputColumns) } } } - + public MissingValueIndicator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11364,7 +11364,7 @@ public MissingValueIndicator(params ValueTuple[] inputOutputColu } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11405,7 +11405,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11461,7 +11461,7 @@ public sealed partial class MissingValuesDropper : Microsoft.ML.Runtime.EntryPoi public MissingValuesDropper() { } - + public MissingValuesDropper(params string[] inputColumns) { if (inputColumns != null) @@ -11472,7 +11472,7 @@ public MissingValuesDropper(params string[] inputColumns) } } } - + public MissingValuesDropper(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11483,7 +11483,7 @@ public MissingValuesDropper(params ValueTuple[] inputOutputColum } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -11524,7 +11524,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11594,7 +11594,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11680,7 +11680,7 @@ public sealed partial class MissingValueSubstitutor : Microsoft.ML.Runtime.Entry public MissingValueSubstitutor() { } - + public MissingValueSubstitutor(params string[] inputColumns) { if (inputColumns != null) @@ -11691,7 +11691,7 @@ public MissingValueSubstitutor(params string[] inputColumns) } } } - + public MissingValueSubstitutor(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11702,7 +11702,7 @@ public MissingValueSubstitutor(params ValueTuple[] inputOutputCo } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11753,7 +11753,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -11868,7 +11868,7 @@ public sealed partial class NGramTranslator : Microsoft.ML.Runtime.EntryPoints.C public NGramTranslator() { } - + public NGramTranslator(params string[] inputColumns) { if (inputColumns != null) @@ -11879,7 +11879,7 @@ public NGramTranslator(params string[] inputColumns) } } } - + public NGramTranslator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -11890,7 +11890,7 @@ public NGramTranslator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -11956,7 +11956,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12016,7 +12016,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12081,7 +12081,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12162,7 +12162,7 @@ public sealed partial class PcaCalculator : Microsoft.ML.Runtime.EntryPoints.Com public PcaCalculator() { } - + public PcaCalculator(params string[] inputColumns) { if (inputColumns != null) @@ -12173,7 +12173,7 @@ public PcaCalculator(params string[] inputColumns) } } } - + public PcaCalculator(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -12184,7 +12184,7 @@ public PcaCalculator(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -12250,7 +12250,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12315,7 +12315,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12409,7 +12409,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12499,7 +12499,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12569,7 +12569,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12634,7 +12634,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12699,7 +12699,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12764,7 +12764,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12873,7 +12873,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12943,7 +12943,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -12985,7 +12985,7 @@ public sealed partial class SupervisedBinNormalizer : Microsoft.ML.Runtime.Entry public SupervisedBinNormalizer() { } - + public SupervisedBinNormalizer(params string[] inputColumns) { if (inputColumns != null) @@ -12996,7 +12996,7 @@ public SupervisedBinNormalizer(params string[] inputColumns) } } } - + public SupervisedBinNormalizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13007,7 +13007,7 @@ public SupervisedBinNormalizer(params ValueTuple[] inputOutputCo } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -13073,7 +13073,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13174,12 +13174,12 @@ public sealed partial class TextFeaturizer : Microsoft.ML.Runtime.EntryPoints.Co public TextFeaturizer() { } - + public TextFeaturizer(string outputColumn, params string[] inputColumns) { AddColumn(outputColumn, inputColumns); } - + public void AddColumn(string name, params string[] source) { Column = ManyToOneColumn.Create(name, source); @@ -13269,7 +13269,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13311,7 +13311,7 @@ public sealed partial class TextToKeyConverter : Microsoft.ML.Runtime.EntryPoint public TextToKeyConverter() { } - + public TextToKeyConverter(params string[] inputColumns) { if (inputColumns != null) @@ -13322,7 +13322,7 @@ public TextToKeyConverter(params string[] inputColumns) } } } - + public TextToKeyConverter(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13333,7 +13333,7 @@ public TextToKeyConverter(params ValueTuple[] inputOutputColumns } } } - + public void AddColumn(string source) { var list = Column == null ? new List() : new List(Column); @@ -13394,7 +13394,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13511,7 +13511,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13604,7 +13604,7 @@ public sealed partial class WordTokenizer : Microsoft.ML.Runtime.EntryPoints.Com public WordTokenizer() { } - + public WordTokenizer(params string[] inputColumns) { if (inputColumns != null) @@ -13615,7 +13615,7 @@ public WordTokenizer(params string[] inputColumns) } } } - + public WordTokenizer(params ValueTuple[] inputOutputColumns) { if (inputOutputColumns != null) @@ -13626,7 +13626,7 @@ public WordTokenizer(params ValueTuple[] inputOutputColumns) } } } - + public void AddColumn(string source) { var list = Column == null ? 
new List() : new List(Column); @@ -13672,7 +13672,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra } public Var GetInputData() => Data; - + public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment) { if (previousStep != null) @@ -13704,7 +13704,7 @@ public WordTokenizerPipelineStep(Output output) namespace Runtime { - public abstract class AutoMlEngine : ComponentKind { } + public abstract class AutoMlEngine : ComponentKind {} @@ -13756,7 +13756,7 @@ public sealed class UniformRandomAutoMlEngine : AutoMlEngine internal override string ComponentName => "UniformRandom"; } - public abstract class AutoMlStateBase : ComponentKind { } + public abstract class AutoMlStateBase : ComponentKind {} public enum AutoInferenceAutoMlMlStateArgumentsMetrics { @@ -13827,13 +13827,10 @@ public sealed class AutoMlStateAutoMlStateBase : AutoMlStateBase internal override string ComponentName => "AutoMlState"; } - public abstract class CalibratorTrainer : ComponentKind { } + public abstract class CalibratorTrainer : ComponentKind {} - /// - /// - /// public sealed class FixedPlattCalibratorCalibratorTrainer : CalibratorTrainer { /// @@ -13851,9 +13848,6 @@ public sealed class FixedPlattCalibratorCalibratorTrainer : CalibratorTrainer - /// - /// - /// public sealed class NaiveCalibratorCalibratorTrainer : CalibratorTrainer { internal override string ComponentName => "NaiveCalibrator"; @@ -13861,9 +13855,6 @@ public sealed class NaiveCalibratorCalibratorTrainer : CalibratorTrainer - /// - /// - /// public sealed class PavCalibratorCalibratorTrainer : CalibratorTrainer { internal override string ComponentName => "PavCalibrator"; @@ -13879,7 +13870,7 @@ public sealed class PlattCalibratorCalibratorTrainer : CalibratorTrainer internal override string ComponentName => "PlattCalibrator"; } - public abstract class ClassificationLossFunction : ComponentKind { } + public abstract class ClassificationLossFunction : ComponentKind {} @@ -13936,7 +13927,7 @@ public sealed class SmoothedHingeLossClassificationLossFunction : Classification internal override string ComponentName => "SmoothedHingeLoss"; } - public abstract class EarlyStoppingCriterion : ComponentKind { } + public abstract class EarlyStoppingCriterion : ComponentKind {} @@ -14030,7 +14021,7 @@ public sealed class UPEarlyStoppingCriterion : EarlyStoppingCriterion internal override string ComponentName => "UP"; } - public abstract class FastTreeTrainer : ComponentKind { } + public abstract class FastTreeTrainer : ComponentKind {} @@ -14103,19 +14094,19 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -14272,19 +14263,19 @@ public sealed class 
FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -14491,19 +14482,19 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -14660,19 +14651,19 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -14839,19 +14830,19 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - 
[TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -15008,19 +14999,19 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -15192,19 +15183,19 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// The learning rate /// - [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("LearningRates", 0.025f, 0.4f, isLogScale:true)] public double LearningRates { get; set; } = 0.2d; /// /// Shrinkage /// - [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale: true)] + [TlcModule.SweepableFloatParamAttribute("Shrinkage", 0.025f, 4f, isLogScale:true)] public double Shrinkage { get; set; } = 1d; /// /// Dropout rate for tree regularization /// - [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[] { 0f, 1E-09f, 0.05f, 0.1f, 0.2f })] + [TlcModule.SweepableDiscreteParamAttribute("DropoutRate", new object[]{0f, 1E-09f, 0.05f, 0.1f, 0.2f})] public double DropoutRate { get; set; } /// @@ -15361,19 +15352,19 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// The max number of leaves in each regression tree /// - [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize: 4, isLogScale: true)] + [TlcModule.SweepableLongParamAttribute("NumLeaves", 2, 128, stepSize:4, isLogScale:true)] public int NumLeaves { get; set; } = 20; /// /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data /// - [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[] { 1, 10, 50 })] + [TlcModule.SweepableDiscreteParamAttribute("MinDocumentsInLeafs", new object[]{1, 10, 50})] public int MinDocumentsInLeafs { get; set; } = 10; /// /// Number of weak hypotheses in the ensemble /// - [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[] { 20, 100, 500 })] + [TlcModule.SweepableDiscreteParamAttribute("NumTrees", new object[]{20, 100, 500})] public int NumTrees { get; set; } = 100; /// @@ -15474,7 +15465,7 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer internal override string ComponentName => "FastTreeTweedieRegression"; } - public abstract class NgramExtractor : ComponentKind { } + public abstract class NgramExtractor : ComponentKind {} @@ -15556,7 +15547,7 @@ public sealed class 
NGramHashNgramExtractor : NgramExtractor internal override string ComponentName => "NGramHash"; } - public abstract class ParallelTraining : ComponentKind { } + public abstract class ParallelTraining : ComponentKind {} @@ -15568,7 +15559,7 @@ public sealed class SingleParallelTraining : ParallelTraining internal override string ComponentName => "Single"; } - public abstract class PartitionedPathParser : ComponentKind { } + public abstract class PartitionedPathParser : ComponentKind {} @@ -15619,7 +15610,7 @@ public sealed class SimplePathParserPartitionedPathParser : PartitionedPathParse internal override string ComponentName => "SimplePathParser"; } - public abstract class RegressionLossFunction : ComponentKind { } + public abstract class RegressionLossFunction : ComponentKind {} @@ -15656,7 +15647,7 @@ public sealed class TweedieLossRegressionLossFunction : RegressionLossFunction internal override string ComponentName => "TweedieLoss"; } - public abstract class SDCAClassificationLossFunction : ComponentKind { } + public abstract class SDCAClassificationLossFunction : ComponentKind {} @@ -15698,7 +15689,7 @@ public sealed class SmoothedHingeLossSDCAClassificationLossFunction : SDCAClassi internal override string ComponentName => "SmoothedHingeLoss"; } - public abstract class SDCARegressionLossFunction : ComponentKind { } + public abstract class SDCARegressionLossFunction : ComponentKind {} @@ -15710,7 +15701,7 @@ public sealed class SquaredLossSDCARegressionLossFunction : SDCARegressionLossFu internal override string ComponentName => "SquaredLoss"; } - public abstract class SearchTerminator : ComponentKind { } + public abstract class SearchTerminator : ComponentKind {} @@ -15727,7 +15718,7 @@ public sealed class IterationLimitedSearchTerminator : SearchTerminator internal override string ComponentName => "IterationLimited"; } - public abstract class StopWordsRemover : ComponentKind { } + public abstract class StopWordsRemover : ComponentKind {} diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs index 83a72bf3d8..db6ce22546 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -34,381 +34,12 @@ public sealed class Arguments public string[] Exclude; } - private static class GeneratorUtils - { - public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointInfo) - { - return entryPointInfo.Name; - } - - public static Tuple GetClassAndMethodNames(ModuleCatalog.EntryPointInfo entryPointInfo) - { - var split = entryPointInfo.Name.Split('.'); - Contracts.Assert(split.Length == 2); - return new Tuple(split[0], split[1]); - } - - public static string GetCSharpTypeName(Type type) - { - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - return GetCSharpTypeName(type.GetGenericArguments()[0]) + "?"; - - string name; - using (var p = new CSharpCodeProvider()) - name = p.GetTypeOutput(new CodeTypeReference(type)); - return name; - } - - public static string GetOutputType(Type outputType) - { - Contracts.Check(Var.CheckType(outputType)); - - if (outputType.IsArray) - return $"ArrayVar<{GetCSharpTypeName(outputType.GetElementType())}>"; - if (outputType.IsGenericType && outputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && outputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return 
$"DictionaryVar<{GetCSharpTypeName(outputType.GetGenericTypeArgumentsEx()[1])}>"; - } - - return $"Var<{GetCSharpTypeName(outputType)}>"; - } - - public static string GetInputType(ModuleCatalog catalog, Type inputType, - Dictionary typesSymbolTable, string rootNameSpace = "") - { - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Var<>)) - return $"Var<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[0])}>"; - - if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) - return $"ArrayVar<{GetCSharpTypeName(inputType.GetElementType())}>"; - - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"DictionaryVar<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[1])}>"; - } - - if (Var.CheckType(inputType)) - return $"Var<{GetCSharpTypeName(inputType)}>"; - - bool isNullable = false; - bool isOptional = false; - var type = inputType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - type = type.GetGenericArguments()[0]; - isNullable = true; - } - else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - { - type = type.GetGenericArguments()[0]; - isOptional = true; - } - - var typeEnum = TlcModule.GetDataType(type); - switch (typeEnum) - { - case TlcModule.DataKind.Float: - case TlcModule.DataKind.Int: - case TlcModule.DataKind.UInt: - case TlcModule.DataKind.Char: - case TlcModule.DataKind.String: - case TlcModule.DataKind.Bool: - case TlcModule.DataKind.DataView: - case TlcModule.DataKind.TransformModel: - case TlcModule.DataKind.PredictorModel: - case TlcModule.DataKind.FileHandle: - return GetCSharpTypeName(inputType); - case TlcModule.DataKind.Array: - return GetInputType(catalog, inputType.GetElementType(), typesSymbolTable, rootNameSpace) + "[]"; - case TlcModule.DataKind.Component: - string kind; - bool success = catalog.TryGetComponentKind(type, out kind); - Contracts.Assert(success); - return $"{kind}"; - case TlcModule.DataKind.Enum: - var enumName = GetEnumName(type, typesSymbolTable, rootNameSpace); - if (isNullable) - return $"{enumName}?"; - if (isOptional) - return $"Optional<{enumName}>"; - return $"{enumName}"; - default: - if (isNullable) - return GetEnumName(type, typesSymbolTable, rootNameSpace); ; - if (isOptional) - return $"Optional<{GetEnumName(type, typesSymbolTable, rootNameSpace)}>"; - if (typesSymbolTable.ContainsKey(type.FullName)) - return GetEnumName(type, typesSymbolTable, rootNameSpace); - else - return GetEnumName(type, typesSymbolTable, rootNameSpace); ; - } - } - - public static bool IsComponent(Type inputType) - { - if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) - return false; - - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return false; - } - - if (Var.CheckType(inputType)) - return false; - - var type = inputType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; - else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - - var typeEnum = TlcModule.GetDataType(type); - return typeEnum == TlcModule.DataKind.Component; - } - - public static string Capitalize(string s) - { - if (string.IsNullOrEmpty(s)) - return s; - return 
char.ToUpperInvariant(s[0]) + s.Substring(1); - } - - private static string GetCharAsString(char value) - { - switch (value) - { - case '\t': - return "\\t"; - case '\n': - return "\\n"; - case '\r': - return "\\r"; - case '\\': - return "\\"; - case '\"': - return "\""; - case '\'': - return "\\'"; - case '\0': - return "\\0"; - case '\a': - return "\\a"; - case '\b': - return "\\b"; - case '\f': - return "\\f"; - case '\v': - return "\\v"; - default: - return value.ToString(); - } - } - - public static string GetValue(ModuleCatalog catalog, Type fieldType, object fieldValue, - Dictionary typesSymbolTable, string rootNameSpace = "") - { - if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Var<>)) - return $"new Var<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[0])}>()"; - - if (fieldType.IsArray && Var.CheckType(fieldType.GetElementType())) - return $"new ArrayVar<{GetCSharpTypeName(fieldType.GetElementType())}>()"; - - if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && fieldType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"new DictionaryVar<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[1])}>()"; - } - - if (Var.CheckType(fieldType)) - return $"new Var<{GetCSharpTypeName(fieldType)}>()"; - - if (fieldValue == null) - return null; - - if (!fieldType.IsInterface) - { - try - { - var defaultFieldValue = Activator.CreateInstance(fieldType); - if (defaultFieldValue == fieldValue) - return null; - } - catch (MissingMethodException) - { - // No parameterless constructor, ignore. - } - } - - var typeEnum = TlcModule.GetDataType(fieldType); - if (fieldType.IsGenericType && (fieldType.GetGenericTypeDefinition() == typeof(Optional<>) || fieldType.GetGenericTypeDefinition() == typeof(Nullable<>))) - fieldType = fieldType.GetGenericArguments()[0]; - switch (typeEnum) - { - case TlcModule.DataKind.Array: - var arr = fieldValue as Array; - if (arr != null && arr.GetLength(0) > 0) - return $"{{ {string.Join(", ", arr.Cast().Select(item => GetValue(catalog, fieldType.GetElementType(), item, typesSymbolTable)))} }}"; - return null; - case TlcModule.DataKind.String: - var strval = fieldValue as string; - if (strval != null) - return Quote(strval); - return null; - case TlcModule.DataKind.Float: - if (fieldValue is double d) - { - if (double.IsPositiveInfinity(d)) - return "double.PositiveInfinity"; - if (double.IsNegativeInfinity(d)) - return "double.NegativeInfinity"; - if (d != 0) - return d.ToString("R") + "d"; - } - else if (fieldValue is float f) - { - if (float.IsPositiveInfinity(f)) - return "float.PositiveInfinity"; - if (float.IsNegativeInfinity(f)) - return "float.NegativeInfinity"; - if (f != 0) - return f.ToString("R") + "f"; - } - return null; - case TlcModule.DataKind.Int: - if (fieldValue is int i) - { - if (i != 0) - return i.ToString(); - } - else if (fieldValue is long l) - { - if (l != 0) - return l.ToString(); - } - return null; - case TlcModule.DataKind.Bool: - return (bool)fieldValue ? "true" : "false"; - case TlcModule.DataKind.Enum: - return GetEnumName(fieldType, typesSymbolTable, rootNameSpace) + "." 
+ fieldValue; - case TlcModule.DataKind.Char: - return $"'{GetCharAsString((char)fieldValue)}'"; - case TlcModule.DataKind.Component: - var type = fieldValue.GetType(); - ModuleCatalog.ComponentInfo componentInfo; - if (!catalog.TryFindComponent(fieldType, type, out componentInfo)) - return null; - object defaultComponent = null; - try - { - defaultComponent = Activator.CreateInstance(componentInfo.ArgumentType); - } - catch (MissingMethodException) - { - // No parameterless constructor, ignore. - } - var propertyBag = new List(); - if (defaultComponent != null) - { - foreach (var fieldInfo in componentInfo.ArgumentType.GetFields()) - { - var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; - if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) - continue; - if (fieldInfo.FieldType == typeof(JArray) || fieldInfo.FieldType == typeof(JObject)) - continue; - - var propertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(fieldValue), typesSymbolTable); - var defaultPropertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaultComponent), typesSymbolTable); - if (propertyValue != defaultPropertyValue) - propertyBag.Add($"{GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} = {propertyValue}"); - } - } - var properties = propertyBag.Count > 0 ? $" {{ {string.Join(", ", propertyBag)} }}" : ""; - return $"new {GetComponentName(componentInfo)}(){properties}"; - case TlcModule.DataKind.Unknown: - return $"new {GetEnumName(fieldType, typesSymbolTable, rootNameSpace)}()"; - default: - return fieldValue.ToString(); - } - } - - private static string Quote(string src) - { - var dst = src.Replace("\\", @"\\").Replace("\"", "\\\"").Replace("\n", @"\n").Replace("\r", @"\r"); - return "\"" + dst + "\""; - } - - public static string GetComponentName(ModuleCatalog.ComponentInfo component) - { - return $"{Capitalize(component.Name)}{component.Kind}"; - } - - public static string GetEnumName(Type type, Dictionary typesSymbolTable, string rootNamespace) - { - if (!typesSymbolTable.TryGetValue(type.FullName, out string fullname)) - fullname = GetSymbolFromType(typesSymbolTable, type, rootNamespace); - if (fullname.StartsWith(rootNamespace)) - return fullname.Substring(rootNamespace.Length + 1); - else return fullname; - } - - public static string GetJsonFromField(string fieldName, Type fieldType) - { - if (fieldType.IsArray && Var.CheckType(fieldType.GetElementType())) - return $"{{({fieldName}.IsValue ? {fieldName}.VarName : $\"'${{{fieldName}.VarName}}'\")}}"; - if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && fieldType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"'${{{fieldName}.VarName}}'"; - } - if (Var.CheckType(fieldType)) - return $"'${{{fieldName}.VarName}}'"; - - var isNullable = false; - var type = fieldType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - type = type.GetGenericArguments()[0]; - isNullable = true; - } - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - - var typeEnum = TlcModule.GetDataType(type); - switch (typeEnum) - { - default: - if (isNullable) - return $"{{(!{fieldName}.HasValue ? \"null\" : $\"{{{fieldName}.Value}}\")}}"; - return $"{{{fieldName}}}"; - case TlcModule.DataKind.Enum: - if (isNullable) - return $"{{(!{fieldName}.HasValue ? 
\"null\" : $\"'{{{fieldName}.Value}}'\")}}"; - return $"'{{{fieldName}}}'"; - case TlcModule.DataKind.String: - return $"{{({fieldName} == null ? \"null\" : $\"'{{{fieldName}}}'\")}}"; - case TlcModule.DataKind.Bool: - if (isNullable) - return $"{{(!{fieldName}.HasValue ? \"null\" : {fieldName}.Value ? \"true\" : \"false\")}}"; - return $"'{{({fieldName} ? \"true\" : \"false\")}}'"; - case TlcModule.DataKind.Component: - case TlcModule.DataKind.Unknown: - return $"{{({fieldName} == null ? \"null\" : {fieldName}.ToJson())}}"; - case TlcModule.DataKind.Array: - return $"[{{({fieldName} == null ? \"\" : string.Join(\",\", {fieldName}.Select(f => $\"{GetJsonFromField("f", type.GetElementType())}\")))}}]"; - } - } - } - private readonly IHost _host; private readonly string _csFilename; private readonly string _regenerate; private readonly HashSet _excludedSet; private const string RegistrationName = "CSharpApiGenerator"; + private const string _defaultNamespace = "Microsoft.ML."; private Dictionary _typesSymbolTable = new Dictionary(); public CSharpApiGenerator(IHostEnvironment env, Arguments args, string regenerate) @@ -435,7 +66,7 @@ public void Generate(IEnumerable infos) var writer = IndentingTextWriter.Wrap(sw, " "); // Generate header - GenerateHeader(writer); + GeneratorUtils.GenerateHeader(writer); foreach (var entryPointInfo in catalog.AllEntryPoints().Where(x => !_excludedSet.Contains(x.Name)).OrderBy(x => x.Name)) { @@ -444,8 +75,8 @@ public void Generate(IEnumerable infos) } // Generate footer - GenerateFooter(writer); - GenerateFooter(writer); + GeneratorUtils.GenerateFooter(writer); + GeneratorUtils.GenerateFooter(writer); foreach (var entryPointInfo in catalog.AllEntryPoints().Where(x => !_excludedSet.Contains(x.Name)).OrderBy(x => x.Name)) { @@ -469,56 +100,20 @@ public void Generate(IEnumerable infos) } } - GenerateFooter(writer); - GenerateFooter(writer); + GeneratorUtils.GenerateFooter(writer); + GeneratorUtils.GenerateFooter(writer); writer.WriteLine("#pragma warning restore"); } } - private void GenerateHeader(IndentingTextWriter writer) - { - writer.WriteLine("//------------------------------------------------------------------------------"); - writer.WriteLine("// "); - writer.WriteLine("// This code was generated by a tool."); - writer.WriteLine("//"); - writer.WriteLine("// Changes to this file may cause incorrect behavior and will be lost if"); - writer.WriteLine("// the code is regenerated."); - writer.WriteLine("// "); - writer.WriteLine("//------------------------------------------------------------------------------"); - //writer.WriteLine($"// This file is auto generated. 
To regenerate it, run: {_regenerate}"); - writer.WriteLine("#pragma warning disable"); - writer.WriteLine("using System.Collections.Generic;"); - writer.WriteLine("using Microsoft.ML.Runtime;"); - writer.WriteLine("using Microsoft.ML.Runtime.Data;"); - writer.WriteLine("using Microsoft.ML.Runtime.EntryPoints;"); - writer.WriteLine("using Newtonsoft.Json;"); - writer.WriteLine("using System;"); - writer.WriteLine("using System.Linq;"); - writer.WriteLine("using Microsoft.ML.Runtime.CommandLine;"); - writer.WriteLine(); - writer.WriteLine("namespace Microsoft.ML"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("namespace Runtime"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("public sealed partial class Experiment"); - writer.WriteLine("{"); - writer.Indent(); - } - private void GenerateFooter(IndentingTextWriter writer) - { - writer.Outdent(); - writer.WriteLine("}"); - } private void GenerateInputOutput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) { - var classAndMethod = GeneratorUtils.GetClassAndMethodNames(entryPointInfo); - writer.WriteLine($"namespace {classAndMethod.Item1}"); + var classAndMethod = GeneratorUtils.GetEntryPointMetadata(entryPointInfo); + writer.WriteLine($"namespace {classAndMethod.Namespace}"); writer.WriteLine("{"); writer.Indent(); GenerateInput(writer, entryPointInfo, catalog); @@ -527,77 +122,6 @@ private void GenerateInputOutput(IndentingTextWriter writer, writer.WriteLine(); } - /// - /// This methods creates a unique name for a class/struct/enum, given a type and a namespace. - /// It generates the name based on the property of the type - /// (see description here https://msdn.microsoft.com/en-us/library/system.type.fullname(v=vs.110).aspx). - /// Example: Assume we have the following structure in namespace X.Y: - /// class A { - /// class B { - /// enum C { - /// Value1, - /// Value2 - /// } - /// } - /// } - /// The full name of C would be X.Y.A+B+C. This method will generate the name "ABC" from it. In case - /// A is generic with one generic type, then the full name of typeof(A<float>.B.C) would be X.Y.A`1+B+C[[System.Single]]. - /// In this case, this method will generate the name "ASingleBC". - /// - /// A dictionary containing the names of the classes already generated. - /// This parameter is only used to ensure that the newly generated name is unique. - /// The type for which to generate the new name. - /// The namespace prefix to the new name. - /// A unique name derived from the given type and namespace. - private static string GetSymbolFromType(Dictionary typesSymbolTable, Type type, string currentNamespace) - { - var fullTypeName = type.FullName; - string name = currentNamespace != "" ? currentNamespace + '.' : ""; - - int bracketIndex = fullTypeName.IndexOf('['); - Type[] genericTypes = null; - if (type.IsGenericType) - genericTypes = type.GetGenericArguments(); - if (bracketIndex > 0) - { - Contracts.AssertValue(genericTypes); - fullTypeName = fullTypeName.Substring(0, bracketIndex); - } - - // When the type is nested, the names of the outer types are concatenated with a '+'. - var nestedNames = fullTypeName.Split('+'); - var baseName = nestedNames[0]; - - // We currently only handle generic types in the outer most class, support for generic inner classes - // can be added if needed. 
- int backTickIndex = baseName.LastIndexOf('`'); - int dotIndex = baseName.LastIndexOf('.'); - Contracts.Assert(dotIndex >= 0); - if (backTickIndex < 0) - name += baseName.Substring(dotIndex + 1); - else - { - name += baseName.Substring(dotIndex + 1, backTickIndex - dotIndex - 1); - Contracts.AssertValue(genericTypes); - if (genericTypes != null) - { - foreach (var genType in genericTypes) - { - var splitNames = genType.FullName.Split('+'); - if (splitNames[0].LastIndexOf('.') >= 0) - splitNames[0] = splitNames[0].Substring(splitNames[0].LastIndexOf('.') + 1); - name += string.Join("", splitNames); - } - } - } - - for (int i = 1; i < nestedNames.Length; i++) - name += nestedNames[i]; - - Contracts.Assert(typesSymbolTable.Select(kvp => kvp.Value).All(str => string.Compare(str, name) != 0)); - typesSymbolTable[type.FullName] = name; - return name; - } private void GenerateEnums(IndentingTextWriter writer, Type inputType, string currentNamespace) { @@ -626,7 +150,7 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu var enumType = Enum.GetUnderlyingType(type); - var symbolName = GetSymbolFromType(_typesSymbolTable, type, currentNamespace); + var symbolName = GeneratorUtils.GetSymbolFromType(_typesSymbolTable, type, currentNamespace); if (enumType == typeof(int)) writer.WriteLine($"public enum {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}"); else @@ -676,7 +200,7 @@ string GetFriendlyTypeName(string currentNameSpace, string typeName) return typeName; } - private void GenerateStructs(IndentingTextWriter writer, + private void GenerateClasses(IndentingTextWriter writer, Type inputType, ModuleCatalog catalog, string currentNamespace) @@ -710,9 +234,9 @@ private void GenerateStructs(IndentingTextWriter writer, if (_typesSymbolTable.ContainsKey(type.FullName)) continue; - GenerateEnums(writer, type,currentNamespace); - GenerateStructs(writer, type, catalog, currentNamespace); - var symbolName = GetSymbolFromType(_typesSymbolTable, type, currentNamespace); + GenerateEnums(writer, type, currentNamespace); + GenerateClasses(writer, type, catalog, currentNamespace); + var symbolName = GeneratorUtils.GetSymbolFromType(_typesSymbolTable, type, currentNamespace); string classBase = ""; if (type.IsSubclassOf(typeof(OneToOneColumn))) classBase = $" : OneToOneColumn<{symbolName.Substring(symbolName.LastIndexOf('.') + 1)}>, IOneToOneColumn"; @@ -721,7 +245,7 @@ private void GenerateStructs(IndentingTextWriter writer, writer.WriteLine($"public sealed partial class {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}{classBase}"); writer.WriteLine("{"); writer.Indent(); - GenerateInputFields(writer, type, catalog, _typesSymbolTable, currentNamespace); + GenerateInputFields(writer, type, catalog, currentNamespace); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); @@ -815,119 +339,133 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, if (type.IsSubclassOf(typeof(OneToOneColumn))) { - var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? 
fieldInfo.Name); - writer.WriteLine($"public {className}()"); - writer.WriteLine("{"); - writer.WriteLine("}"); - writer.WriteLine(""); - writer.WriteLine($"public {className}(params string[] input{fieldName}s)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"if (input{fieldName}s != null)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"foreach (string input in input{fieldName}s)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"Add{fieldName}(input);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.Outdent(); - writer.WriteLine("}"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - writer.WriteLine($"public {className}(params ValueTuple[] inputOutput{fieldName}s)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"if (inputOutput{fieldName}s != null)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"foreach (ValueTuple inputOutput in inputOutput{fieldName}s)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"Add{fieldName}(inputOutput.Item2, inputOutput.Item1);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.Outdent(); - writer.WriteLine("}"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - writer.WriteLine($"public void Add{fieldName}(string source)"); - writer.WriteLine("{"); - writer.Indent(); - if (isArray) - { - writer.WriteLine($"var list = {fieldName} == null ? new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(source));"); - writer.WriteLine($"{fieldName} = list.ToArray();"); - } - else - writer.WriteLine($"{fieldName} = OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(source);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); - writer.WriteLine($"public void Add{fieldName}(string name, string source)"); - writer.WriteLine("{"); - writer.Indent(); - if (isArray) - { - writer.WriteLine($"var list = {fieldName} == null ? new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); - writer.WriteLine($"{fieldName} = list.ToArray();"); - } - else - writer.WriteLine($"{fieldName} = OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); - - Contracts.Assert(columnType == null); - - columnType = type; + columnType = GenerateOneToOneColumn(writer, className, columnType, fieldInfo, inputAttr, type, isArray); } else if (type.IsSubclassOf(typeof(ManyToOneColumn))) { - var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); - writer.WriteLine($"public {className}()"); - writer.WriteLine("{"); - writer.WriteLine("}"); - writer.WriteLine(""); - writer.WriteLine($"public {className}(string output{fieldName}, params string[] input{fieldName}s)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"Add{fieldName}(output{fieldName}, input{fieldName}s);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - writer.WriteLine($"public void Add{fieldName}(string name, params string[] source)"); - writer.WriteLine("{"); - writer.Indent(); - if (isArray) - { - writer.WriteLine($"var list = {fieldName} == null ? 
new List<{_typesSymbolTable[type.FullName]}>() : new List<{_typesSymbolTable[type.FullName]}>({fieldName});"); - writer.WriteLine($"list.Add(ManyToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); - writer.WriteLine($"{fieldName} = list.ToArray();"); - } - else - writer.WriteLine($"{fieldName} = ManyToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); + columnType = GenerateManyToOneColumn(writer, className, columnType, fieldInfo, inputAttr, type, isArray); + } + } + } - Contracts.Assert(columnType == null); + private Type GenerateManyToOneColumn(IndentingTextWriter writer, string className, Type columnType, + System.Reflection.FieldInfo fieldInfo, ArgumentAttribute inputAttr, Type type, bool isArray) + { + var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); + var generatedType = _typesSymbolTable[type.FullName]; + writer.WriteLine($"public {className}()"); + writer.WriteLine("{"); + writer.WriteLine("}"); + writer.WriteLine(""); + writer.WriteLine($"public {className}(string output{fieldName}, params string[] input{fieldName}s)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"Add{fieldName}(output{fieldName}, input{fieldName}s);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + writer.WriteLine($"public void Add{fieldName}(string name, params string[] source)"); + writer.WriteLine("{"); + writer.Indent(); + if (isArray) + { + writer.WriteLine($"var list = {fieldName} == null ? new List<{generatedType}>() : new List<{generatedType}>({fieldName});"); + writer.WriteLine($"list.Add(ManyToOneColumn<{generatedType}>.Create(name, source));"); + writer.WriteLine($"{fieldName} = list.ToArray();"); + } + else + writer.WriteLine($"{fieldName} = ManyToOneColumn<{generatedType}>.Create(name, source);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); - columnType = type; - } + Contracts.Assert(columnType == null); + + columnType = type; + return columnType; + } + + private Type GenerateOneToOneColumn(IndentingTextWriter writer, string className, Type columnType, + System.Reflection.FieldInfo fieldInfo, ArgumentAttribute inputAttr, Type type, bool isArray) + { + var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? 
fieldInfo.Name); + var generatedType = _typesSymbolTable[type.FullName]; + writer.WriteLine($"public {className}()"); + writer.WriteLine("{"); + writer.WriteLine("}"); + writer.WriteLine(""); + writer.WriteLine($"public {className}(params string[] input{fieldName}s)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"if (input{fieldName}s != null)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"foreach (string input in input{fieldName}s)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"Add{fieldName}(input);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.Outdent(); + writer.WriteLine("}"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + writer.WriteLine($"public {className}(params ValueTuple[] inputOutput{fieldName}s)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"if (inputOutput{fieldName}s != null)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"foreach (ValueTuple inputOutput in inputOutput{fieldName}s)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"Add{fieldName}(inputOutput.Item2, inputOutput.Item1);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.Outdent(); + writer.WriteLine("}"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + writer.WriteLine($"public void Add{fieldName}(string source)"); + writer.WriteLine("{"); + writer.Indent(); + if (isArray) + { + writer.WriteLine($"var list = {fieldName} == null ? new List<{generatedType}>() : new List<{generatedType}>({fieldName});"); + writer.WriteLine($"list.Add(OneToOneColumn<{generatedType}>.Create(source));"); + writer.WriteLine($"{fieldName} = list.ToArray();"); } + else + writer.WriteLine($"{fieldName} = OneToOneColumn<{generatedType}>.Create(source);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); + writer.WriteLine($"public void Add{fieldName}(string name, string source)"); + writer.WriteLine("{"); + writer.Indent(); + if (isArray) + { + writer.WriteLine($"var list = {fieldName} == null ? new List<{generatedType}>() : new List<{generatedType}>({fieldName});"); + writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); + writer.WriteLine($"{fieldName} = list.ToArray();"); + } + else + writer.WriteLine($"{fieldName} = OneToOneColumn<{generatedType}>.Create(name, source);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); + + Contracts.Assert(columnType == null); + + columnType = type; + return columnType; } - private void GenerateInput(IndentingTextWriter writer, - ModuleCatalog.EntryPointInfo entryPointInfo, - ModuleCatalog catalog) + private void GenerateInput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) { - var classAndMethod = GeneratorUtils.GetClassAndMethodNames(entryPointInfo); + var entryPointMetadata = GeneratorUtils.GetEntryPointMetadata(entryPointInfo); string classBase = ""; if (entryPointInfo.InputKinds != null) { @@ -936,36 +474,33 @@ private void GenerateInput(IndentingTextWriter writer, classBase += ", Microsoft.ML.ILearningPipelineItem"; } - GenerateEnums(writer, entryPointInfo.InputType, "Microsoft.ML." + classAndMethod.Item1); + GenerateEnums(writer, entryPointInfo.InputType, _defaultNamespace + entryPointMetadata.Namespace); writer.WriteLine(); - GenerateStructs(writer, entryPointInfo.InputType, catalog, "Microsoft.ML." 
+ classAndMethod.Item1); - writer.WriteLine("/// "); - foreach (var line in entryPointInfo.Description.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)) - writer.WriteLine($"/// {line}"); - writer.WriteLine("/// "); + GenerateClasses(writer, entryPointInfo.InputType, catalog, _defaultNamespace + entryPointMetadata.Namespace); + GeneratorUtils.GenerateSummary(writer, entryPointInfo.Description); if (entryPointInfo.ObsoleteAttribute != null) writer.WriteLine($"[Obsolete(\"{entryPointInfo.ObsoleteAttribute.Message}\")]"); - writer.WriteLine($"public sealed partial class {classAndMethod.Item2}{classBase}"); + writer.WriteLine($"public sealed partial class {entryPointMetadata.ClassName}{classBase}"); writer.WriteLine("{"); writer.Indent(); writer.WriteLine(); if (entryPointInfo.InputKinds != null && entryPointInfo.InputKinds.Any(t => typeof(ILearningPipelineLoader).IsAssignableFrom(t))) - GenerateLoaderAddInputMethod(writer, classAndMethod.Item2); + GenerateLoaderAddInputMethod(writer, entryPointMetadata.ClassName); - GenerateColumnAddMethods(writer, entryPointInfo.InputType, catalog, classAndMethod.Item2, out Type transformType); + GenerateColumnAddMethods(writer, entryPointInfo.InputType, catalog, entryPointMetadata.ClassName, out Type transformType); writer.WriteLine(); - GenerateInputFields(writer, entryPointInfo.InputType, catalog, _typesSymbolTable, "Microsoft.ML." + classAndMethod.Item1); + GenerateInputFields(writer, entryPointInfo.InputType, catalog, _defaultNamespace + entryPointMetadata.Namespace); writer.WriteLine(); GenerateOutput(writer, entryPointInfo, out HashSet outputVariableNames); - GenerateApplyFunction(writer, entryPointInfo, transformType, outputVariableNames, entryPointInfo.InputKinds); + GenerateApplyFunction(writer, entryPointMetadata.ClassName, transformType, outputVariableNames, entryPointInfo.InputKinds); writer.Outdent(); writer.WriteLine("}"); } - private static void GenerateApplyFunction(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, + private static void GenerateApplyFunction(IndentingTextWriter writer, string className, Type type, HashSet outputVariableNames, Type[] inputKinds) { if (inputKinds == null) @@ -988,7 +523,6 @@ private static void GenerateApplyFunction(IndentingTextWriter writer, ModuleCata writer.WriteLine("public Var GetInputData() => TrainingData;"); writer.WriteLine(""); - string className = GeneratorUtils.GetClassAndMethodNames(entryPointInfo).Item2; writer.WriteLine("public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)"); writer.WriteLine("{"); @@ -1058,8 +592,7 @@ private static void GenerateApplyFunction(IndentingTextWriter writer, ModuleCata writer.WriteLine("}"); } - private void GenerateInputFields(IndentingTextWriter writer, - Type inputType, ModuleCatalog catalog, Dictionary typesSymbolTable, string rootNameSpace) + private void GenerateInputFields(IndentingTextWriter writer, Type inputType, ModuleCatalog catalog, string rootNameSpace) { var defaults = Activator.CreateInstance(inputType); foreach (var fieldInfo in inputType.GetFields()) @@ -1071,9 +604,7 @@ private void GenerateInputFields(IndentingTextWriter writer, if (fieldInfo.FieldType == typeof(JObject)) continue; - writer.WriteLine("/// "); - writer.WriteLine($"/// {inputAttr.HelpText}"); - writer.WriteLine("/// "); + GeneratorUtils.GenerateSummary(writer, inputAttr.HelpText); if (fieldInfo.FieldType == typeof(JArray)) { writer.WriteLine($"public Experiment 
{GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} {{ get; set; }}"); @@ -1081,7 +612,7 @@ private void GenerateInputFields(IndentingTextWriter writer, continue; } - var inputTypeString = GeneratorUtils.GetInputType(catalog, fieldInfo.FieldType, typesSymbolTable, rootNameSpace); + var inputTypeString = GeneratorUtils.GetInputType(catalog, fieldInfo.FieldType, _typesSymbolTable, rootNameSpace); if (GeneratorUtils.IsComponent(fieldInfo.FieldType)) writer.WriteLine("[JsonConverter(typeof(ComponentSerializer))]"); if (GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name) != (inputAttr.Name ?? fieldInfo.Name)) @@ -1107,7 +638,7 @@ private void GenerateInputFields(IndentingTextWriter writer, } writer.Write($"public {inputTypeString} {GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} {{ get; set; }}"); - var defaultValue = GeneratorUtils.GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaults), typesSymbolTable, rootNameSpace); + var defaultValue = GeneratorUtils.GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaults), _typesSymbolTable, rootNameSpace); if (defaultValue != null) writer.Write($" = {defaultValue};"); writer.WriteLine(); @@ -1137,9 +668,7 @@ private void GenerateOutput(IndentingTextWriter writer, if (outputAttr == null) continue; - writer.WriteLine("/// "); - writer.WriteLine($"/// {outputAttr.Desc}"); - writer.WriteLine("/// "); + GeneratorUtils.GenerateSummary(writer, outputAttr.Desc); var outputTypeString = GeneratorUtils.GetOutputType(fieldInfo.FieldType); outputVariableNames.Add(GeneratorUtils.Capitalize(outputAttr.Name ?? fieldInfo.Name)); writer.WriteLine($"public {outputTypeString} {GeneratorUtils.Capitalize(outputAttr.Name ?? fieldInfo.Name)} {{ get; set; }} = new {outputTypeString}();"); @@ -1154,8 +683,7 @@ private void GenerateMethod(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) { - var inputOuputClassName = GeneratorUtils.GetFullMethodName(entryPointInfo); - inputOuputClassName = "Microsoft.ML." 
+ inputOuputClassName; + var inputOuputClassName = _defaultNamespace + entryPointInfo.Name; writer.WriteLine($"public {inputOuputClassName}.Output Add({inputOuputClassName} input)"); writer.WriteLine("{"); writer.Indent(); @@ -1184,15 +712,13 @@ private void GenerateComponent(IndentingTextWriter writer, ModuleCatalog.Compone { GenerateEnums(writer, component.ArgumentType, "Runtime"); writer.WriteLine(); - GenerateStructs(writer, component.ArgumentType, catalog, "Runtime"); + GenerateClasses(writer, component.ArgumentType, catalog, "Runtime"); writer.WriteLine(); - writer.WriteLine("/// "); - writer.WriteLine($"/// {component.Description}"); - writer.WriteLine("/// "); + GeneratorUtils.GenerateSummary(writer, component.Description); writer.WriteLine($"public sealed class {GeneratorUtils.GetComponentName(component)} : {component.Kind}"); writer.WriteLine("{"); writer.Indent(); - GenerateInputFields(writer, component.ArgumentType, catalog, _typesSymbolTable, "Runtime"); + GenerateInputFields(writer, component.ArgumentType, catalog, "Runtime"); writer.WriteLine($"internal override string ComponentName => \"{component.Name}\";"); writer.Outdent(); writer.WriteLine("}"); diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs new file mode 100644 index 0000000000..61c0178a7a --- /dev/null +++ b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs @@ -0,0 +1,467 @@ +using Microsoft.CSharp; +using Microsoft.ML.Runtime.CommandLine; +using Microsoft.ML.Runtime.EntryPoints; +using Microsoft.ML.Runtime.Internal.Utilities; +using Newtonsoft.Json.Linq; +using System; +using System.CodeDom; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace Microsoft.ML.Runtime.Internal.Tools +{ + internal static class GeneratorUtils + { + public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointInfo) + { + return entryPointInfo.Name; + } + + public class EntryPointGenerationMetadata + { + public string Namespace { get; private set; } + public string ClassName { get; private set; } + public EntryPointGenerationMetadata(string @namespace, string className) + { + Namespace = @namespace; + ClassName = className; + } + } + + public static EntryPointGenerationMetadata GetEntryPointMetadata(ModuleCatalog.EntryPointInfo entryPointInfo) + { + var split = entryPointInfo.Name.Split('.'); + Contracts.Assert(split.Length == 2); + return new EntryPointGenerationMetadata(split[0], split[1]); + } + + public static string GetCSharpTypeName(Type type) + { + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + return GetCSharpTypeName(type.GetGenericArguments()[0]) + "?"; + + string name; + using (var p = new CSharpCodeProvider()) + name = p.GetTypeOutput(new CodeTypeReference(type)); + return name; + } + + public static string GetOutputType(Type outputType) + { + Contracts.Check(Var.CheckType(outputType)); + + if (outputType.IsArray) + return $"ArrayVar<{GetCSharpTypeName(outputType.GetElementType())}>"; + if (outputType.IsGenericType && outputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && outputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"DictionaryVar<{GetCSharpTypeName(outputType.GetGenericTypeArgumentsEx()[1])}>"; + } + + return $"Var<{GetCSharpTypeName(outputType)}>"; + } + + public static string GetInputType(ModuleCatalog catalog, Type inputType, Dictionary typesSymbolTable, string rootNameSpace) + { + if 
(inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Var<>)) + return $"Var<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[0])}>"; + + if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) + return $"ArrayVar<{GetCSharpTypeName(inputType.GetElementType())}>"; + + if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"DictionaryVar<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[1])}>"; + } + + if (Var.CheckType(inputType)) + return $"Var<{GetCSharpTypeName(inputType)}>"; + + bool isNullable = false; + bool isOptional = false; + var type = inputType; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + type = type.GetGenericArguments()[0]; + isNullable = true; + } + else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) + { + type = type.GetGenericArguments()[0]; + isOptional = true; + } + + var typeEnum = TlcModule.GetDataType(type); + switch (typeEnum) + { + case TlcModule.DataKind.Float: + case TlcModule.DataKind.Int: + case TlcModule.DataKind.UInt: + case TlcModule.DataKind.Char: + case TlcModule.DataKind.String: + case TlcModule.DataKind.Bool: + case TlcModule.DataKind.DataView: + case TlcModule.DataKind.TransformModel: + case TlcModule.DataKind.PredictorModel: + case TlcModule.DataKind.FileHandle: + return GetCSharpTypeName(inputType); + case TlcModule.DataKind.Array: + return GetInputType(catalog, inputType.GetElementType(), typesSymbolTable, rootNameSpace) + "[]"; + case TlcModule.DataKind.Component: + string kind; + bool success = catalog.TryGetComponentKind(type, out kind); + Contracts.Assert(success); + return $"{kind}"; + case TlcModule.DataKind.Enum: + var enumName = GetEnumName(type, typesSymbolTable, rootNameSpace); + if (isNullable) + return $"{enumName}?"; + if (isOptional) + return $"Optional<{enumName}>"; + return $"{enumName}"; + default: + if (isNullable) + return GetEnumName(type, typesSymbolTable, rootNameSpace); ; + if (isOptional) + return $"Optional<{GetEnumName(type, typesSymbolTable, rootNameSpace)}>"; + if (typesSymbolTable.ContainsKey(type.FullName)) + return GetEnumName(type, typesSymbolTable, rootNameSpace); + else + return GetEnumName(type, typesSymbolTable, rootNameSpace); ; + } + } + + public static bool IsComponent(Type inputType) + { + if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) + return false; + + if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return false; + } + + if (Var.CheckType(inputType)) + return false; + + var type = inputType; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + type = type.GetGenericArguments()[0]; + else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) + type = type.GetGenericArguments()[0]; + + var typeEnum = TlcModule.GetDataType(type); + return typeEnum == TlcModule.DataKind.Component; + } + + public static string Capitalize(string s) + { + if (string.IsNullOrEmpty(s)) + return s; + return char.ToUpperInvariant(s[0]) + s.Substring(1); + } + + private static string GetCharAsString(char value) + { + switch (value) + { + case '\t': + return "\\t"; + case '\n': + return "\\n"; + case '\r': + return "\\r"; + case '\\': + return "\\"; + case '\"': + return "\""; + case '\'': + return "\\'"; + 
case '\0': + return "\\0"; + case '\a': + return "\\a"; + case '\b': + return "\\b"; + case '\f': + return "\\f"; + case '\v': + return "\\v"; + default: + return value.ToString(); + } + } + + public static string GetValue(ModuleCatalog catalog, Type fieldType, object fieldValue, + Dictionary typesSymbolTable, string rootNameSpace = "") + { + if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Var<>)) + return $"new Var<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[0])}>()"; + + if (fieldType.IsArray && Var.CheckType(fieldType.GetElementType())) + return $"new ArrayVar<{GetCSharpTypeName(fieldType.GetElementType())}>()"; + + if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && fieldType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"new DictionaryVar<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[1])}>()"; + } + + if (Var.CheckType(fieldType)) + return $"new Var<{GetCSharpTypeName(fieldType)}>()"; + + if (fieldValue == null) + return null; + + if (!fieldType.IsInterface) + { + try + { + var defaultFieldValue = Activator.CreateInstance(fieldType); + if (defaultFieldValue == fieldValue) + return null; + } + catch (MissingMethodException) + { + // No parameterless constructor, ignore. + } + } + + var typeEnum = TlcModule.GetDataType(fieldType); + if (fieldType.IsGenericType && (fieldType.GetGenericTypeDefinition() == typeof(Optional<>) || fieldType.GetGenericTypeDefinition() == typeof(Nullable<>))) + fieldType = fieldType.GetGenericArguments()[0]; + switch (typeEnum) + { + case TlcModule.DataKind.Array: + var arr = fieldValue as Array; + if (arr != null && arr.GetLength(0) > 0) + return $"{{ {string.Join(", ", arr.Cast().Select(item => GetValue(catalog, fieldType.GetElementType(), item, typesSymbolTable)))} }}"; + return null; + case TlcModule.DataKind.String: + var strval = fieldValue as string; + if (strval != null) + return Quote(strval); + return null; + case TlcModule.DataKind.Float: + if (fieldValue is double d) + { + if (double.IsPositiveInfinity(d)) + return "double.PositiveInfinity"; + if (double.IsNegativeInfinity(d)) + return "double.NegativeInfinity"; + if (d != 0) + return d.ToString("R") + "d"; + } + else if (fieldValue is float f) + { + if (float.IsPositiveInfinity(f)) + return "float.PositiveInfinity"; + if (float.IsNegativeInfinity(f)) + return "float.NegativeInfinity"; + if (f != 0) + return f.ToString("R") + "f"; + } + return null; + case TlcModule.DataKind.Int: + if (fieldValue is int i) + { + if (i != 0) + return i.ToString(); + } + else if (fieldValue is long l) + { + if (l != 0) + return l.ToString(); + } + return null; + case TlcModule.DataKind.Bool: + return (bool)fieldValue ? "true" : "false"; + case TlcModule.DataKind.Enum: + return GetEnumName(fieldType, typesSymbolTable, rootNameSpace) + "." + fieldValue; + case TlcModule.DataKind.Char: + return $"'{GetCharAsString((char)fieldValue)}'"; + case TlcModule.DataKind.Component: + var type = fieldValue.GetType(); + ModuleCatalog.ComponentInfo componentInfo; + if (!catalog.TryFindComponent(fieldType, type, out componentInfo)) + return null; + object defaultComponent = null; + try + { + defaultComponent = Activator.CreateInstance(componentInfo.ArgumentType); + } + catch (MissingMethodException) + { + // No parameterless constructor, ignore. 
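                        // (Illustrative note) If no baseline of defaults can be constructed here,
                        // defaultComponent stays null and the property loop below is skipped entirely,
                        // so the generated expression falls back to plain "new <ComponentName>()" with
                        // no object-initializer list.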
+ } + var propertyBag = new List(); + if (defaultComponent != null) + { + foreach (var fieldInfo in componentInfo.ArgumentType.GetFields()) + { + var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; + if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) + continue; + if (fieldInfo.FieldType == typeof(JArray) || fieldInfo.FieldType == typeof(JObject)) + continue; + + var propertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(fieldValue), typesSymbolTable); + var defaultPropertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaultComponent), typesSymbolTable); + if (propertyValue != defaultPropertyValue) + propertyBag.Add($"{GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} = {propertyValue}"); + } + } + var properties = propertyBag.Count > 0 ? $" {{ {string.Join(", ", propertyBag)} }}" : ""; + return $"new {GetComponentName(componentInfo)}(){properties}"; + case TlcModule.DataKind.Unknown: + return $"new {GetEnumName(fieldType, typesSymbolTable, rootNameSpace)}()"; + default: + return fieldValue.ToString(); + } + } + + private static string Quote(string src) + { + var dst = src.Replace("\\", @"\\").Replace("\"", "\\\"").Replace("\n", @"\n").Replace("\r", @"\r"); + return "\"" + dst + "\""; + } + + public static string GetComponentName(ModuleCatalog.ComponentInfo component) + { + return $"{Capitalize(component.Name)}{component.Kind}"; + } + + public static string GetEnumName(Type type, Dictionary typesSymbolTable, string rootNamespace) + { + if (!typesSymbolTable.TryGetValue(type.FullName, out string fullname)) + fullname = GetSymbolFromType(typesSymbolTable, type, rootNamespace); + if (fullname.StartsWith(rootNamespace)) + return fullname.Substring(rootNamespace.Length + 1); + else return fullname; + } + + /// + /// This methods creates a unique name for a class/struct/enum, given a type and a namespace. + /// It generates the name based on the property of the type + /// (see description here https://msdn.microsoft.com/en-us/library/system.type.fullname(v=vs.110).aspx). + /// Example: Assume we have the following structure in namespace X.Y: + /// class A { + /// class B { + /// enum C { + /// Value1, + /// Value2 + /// } + /// } + /// } + /// The full name of C would be X.Y.A+B+C. This method will generate the name "ABC" from it. In case + /// A is generic with one generic type, then the full name of typeof(A<float>.B.C) would be X.Y.A`1+B+C[[System.Single]]. + /// In this case, this method will generate the name "ASingleBC". + /// + /// A dictionary containing the names of the classes already generated. + /// This parameter is only used to ensure that the newly generated name is unique. + /// The type for which to generate the new name. + /// The namespace prefix to the new name. + /// A unique name derived from the given type and namespace. + public static string GetSymbolFromType(Dictionary typesSymbolTable, Type type, string currentNamespace) + { + var fullTypeName = type.FullName; + string name = currentNamespace != "" ? currentNamespace + '.' : ""; + + int bracketIndex = fullTypeName.IndexOf('['); + Type[] genericTypes = null; + if (type.IsGenericType) + genericTypes = type.GetGenericArguments(); + if (bracketIndex > 0) + { + Contracts.AssertValue(genericTypes); + fullTypeName = fullTypeName.Substring(0, bracketIndex); + } + + // When the type is nested, the names of the outer types are concatenated with a '+'. 
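            // For example, a nested enum with full name "X.Y.A+B+C" splits into { "X.Y.A", "B", "C" };
            // the namespace portion of the first segment is dropped and the remaining pieces are
            // concatenated to "ABC", with any generic arguments on the outer-most type spliced in by
            // the backtick handling below.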
+ var nestedNames = fullTypeName.Split('+'); + var baseName = nestedNames[0]; + + // We currently only handle generic types in the outer most class, support for generic inner classes + // can be added if needed. + int backTickIndex = baseName.LastIndexOf('`'); + int dotIndex = baseName.LastIndexOf('.'); + Contracts.Assert(dotIndex >= 0); + if (backTickIndex < 0) + name += baseName.Substring(dotIndex + 1); + else + { + name += baseName.Substring(dotIndex + 1, backTickIndex - dotIndex - 1); + Contracts.AssertValue(genericTypes); + if (genericTypes != null) + { + foreach (var genType in genericTypes) + { + var splitNames = genType.FullName.Split('+'); + if (splitNames[0].LastIndexOf('.') >= 0) + splitNames[0] = splitNames[0].Substring(splitNames[0].LastIndexOf('.') + 1); + name += string.Join("", splitNames); + } + } + } + + for (int i = 1; i < nestedNames.Length; i++) + name += nestedNames[i]; + + Contracts.Assert(typesSymbolTable.Select(kvp => kvp.Value).All(str => string.Compare(str, name) != 0)); + typesSymbolTable[type.FullName] = name; + return name; + } + + public static void GenerateSummary(IndentingTextWriter writer, string summary) + { + if (string.IsNullOrEmpty(summary)) + return; + writer.WriteLine("/// "); + foreach (var line in summary.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)) + writer.WriteLine($"/// {line}"); + writer.WriteLine("/// "); + } + + public static void GenerateHeader(IndentingTextWriter writer) + { + writer.WriteLine("//------------------------------------------------------------------------------"); + writer.WriteLine("// "); + writer.WriteLine("// This code was generated by a tool."); + writer.WriteLine("//"); + writer.WriteLine("// Changes to this file may cause incorrect behavior and will be lost if"); + writer.WriteLine("// the code is regenerated."); + writer.WriteLine("// "); + writer.WriteLine("//------------------------------------------------------------------------------"); + //writer.WriteLine($"// This file is auto generated. 
To regenerate it, run: {_regenerate}"); + writer.WriteLine("#pragma warning disable"); + writer.WriteLine("using System.Collections.Generic;"); + writer.WriteLine("using Microsoft.ML.Runtime;"); + writer.WriteLine("using Microsoft.ML.Runtime.Data;"); + writer.WriteLine("using Microsoft.ML.Runtime.EntryPoints;"); + writer.WriteLine("using Newtonsoft.Json;"); + writer.WriteLine("using System;"); + writer.WriteLine("using System.Linq;"); + writer.WriteLine("using Microsoft.ML.Runtime.CommandLine;"); + writer.WriteLine(); + writer.WriteLine("namespace Microsoft.ML"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("namespace Runtime"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("public sealed partial class Experiment"); + writer.WriteLine("{"); + writer.Indent(); + } + + public static void GenerateFooter(IndentingTextWriter writer) + { + writer.Outdent(); + writer.WriteLine("}"); + } + + } +} diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs index c647110702..d95c6c48ce 100644 --- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs +++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs @@ -15,7 +15,7 @@ public CSharpCodeGen(ITestOutputHelper output) : base(output) { } - [Fact(Skip = "Temporary solution(Windows ONLY) to regenerate codegenerated CSharpAPI.cs")] + [Fact] public void GenerateCSharpAPI() { var cSharpAPIPath = Path.Combine(RootDir, @"src\\Microsoft.ML\\CSharpApi.cs"); From 057bd560b4225458bc0762b86a1040c63a696cd8 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 13:10:36 -0700 Subject: [PATCH 03/11] refactor ExtractOptionalOrNullableType --- .../JsonUtils/JsonManifestUtils.cs | 18 ++------ .../Internal/Tools/CSharpApiGenerator.cs | 23 ++-------- .../Runtime/Internal/Tools/GeneratorUtils.cs | 43 ++++++++++--------- 3 files changed, 29 insertions(+), 55 deletions(-) diff --git a/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs b/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs index 289adc6f75..06f71d8d93 100644 --- a/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs +++ b/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs @@ -7,6 +7,7 @@ using System.Linq; using System.Reflection; using Microsoft.ML.Runtime.CommandLine; +using Microsoft.ML.Runtime.Internal.Tools; using Microsoft.ML.Runtime.Internal.Utilities; using Newtonsoft.Json.Linq; @@ -67,13 +68,7 @@ public static JObject BuildAllManifests(IExceptionContext ectx, ModuleCatalog ca { var jField = new JObject(); jField[FieldNames.Name] = fieldInfo.Name; - var type = fieldInfo.PropertyType; - // Dive inside Optional. - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - // Dive inside Nullable. - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; + var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.PropertyType); // Dive inside Var. if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Var<>)) type = type.GetGenericArguments()[0]; @@ -308,14 +303,7 @@ private static JToken BuildTypeToken(IExceptionContext ectx, FieldInfo fieldInfo jo[FieldNames.ItemType] = typeString; return jo; } - - // Dive inside Optional. - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - - // Dive inside Nullable. 
- if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; + type = GeneratorUtils.ExtractOptionalOrNullableType(type); // Dive inside Var. if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Var<>)) diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs index db6ce22546..9b14bde553 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -106,8 +106,6 @@ public void Generate(IEnumerable infos) } } - - private void GenerateInputOutput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) @@ -122,7 +120,6 @@ private void GenerateInputOutput(IndentingTextWriter writer, writer.WriteLine(); } - private void GenerateEnums(IndentingTextWriter writer, Type inputType, string currentNamespace) { foreach (var fieldInfo in inputType.GetFields()) @@ -130,13 +127,7 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) continue; - - var type = fieldInfo.FieldType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - + var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); if (_typesSymbolTable.ContainsKey(type.FullName)) continue; @@ -212,10 +203,7 @@ private void GenerateClasses(IndentingTextWriter writer, continue; var type = fieldInfo.FieldType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; + type = GeneratorUtils.ExtractOptionalOrNullableType(type); if (type.IsArray) type = type.GetElementType(); if (type == typeof(JArray) || type == typeof(JObject)) @@ -320,12 +308,7 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) continue; - - var type = fieldInfo.FieldType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; + var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); var isArray = type.IsArray; if (isArray) type = type.GetElementType(); diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs index 61c0178a7a..3ef9885e1b 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs @@ -1,13 +1,16 @@ -using Microsoft.CSharp; -using Microsoft.ML.Runtime.CommandLine; -using Microsoft.ML.Runtime.EntryPoints; -using Microsoft.ML.Runtime.Internal.Utilities; -using 
Newtonsoft.Json.Linq; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + using System; using System.CodeDom; using System.Collections.Generic; using System.Linq; -using System.Text; +using Microsoft.CSharp; +using Microsoft.ML.Runtime.CommandLine; +using Microsoft.ML.Runtime.EntryPoints; +using Microsoft.ML.Runtime.Internal.Utilities; +using Newtonsoft.Json.Linq; namespace Microsoft.ML.Runtime.Internal.Tools { @@ -18,10 +21,10 @@ public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointIn return entryPointInfo.Name; } - public class EntryPointGenerationMetadata + public sealed class EntryPointGenerationMetadata { - public string Namespace { get; private set; } - public string ClassName { get; private set; } + public string Namespace { get; } + public string ClassName { get; } public EntryPointGenerationMetadata(string @namespace, string className) { Namespace = @namespace; @@ -36,15 +39,21 @@ public static EntryPointGenerationMetadata GetEntryPointMetadata(ModuleCatalog.E return new EntryPointGenerationMetadata(split[0], split[1]); } + public static Type ExtractOptionalOrNullableType(Type type) + { + if (type.IsGenericType && (type.GetGenericTypeDefinition() == typeof(Optional<>) || type.GetGenericTypeDefinition() == typeof(Nullable<>))) + type = type.GetGenericArguments()[0]; + + return type; + } + public static string GetCSharpTypeName(Type type) { if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) return GetCSharpTypeName(type.GetGenericArguments()[0]) + "?"; - string name; using (var p = new CSharpCodeProvider()) - name = p.GetTypeOutput(new CodeTypeReference(type)); - return name; + return p.GetTypeOutput(new CodeTypeReference(type)); } public static string GetOutputType(Type outputType) @@ -147,12 +156,7 @@ public static bool IsComponent(Type inputType) if (Var.CheckType(inputType)) return false; - var type = inputType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - type = type.GetGenericArguments()[0]; - else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - type = type.GetGenericArguments()[0]; - + var type = ExtractOptionalOrNullableType(inputType); var typeEnum = TlcModule.GetDataType(type); return typeEnum == TlcModule.DataKind.Component; } @@ -231,8 +235,7 @@ public static string GetValue(ModuleCatalog catalog, Type fieldType, object fiel } var typeEnum = TlcModule.GetDataType(fieldType); - if (fieldType.IsGenericType && (fieldType.GetGenericTypeDefinition() == typeof(Optional<>) || fieldType.GetGenericTypeDefinition() == typeof(Nullable<>))) - fieldType = fieldType.GetGenericArguments()[0]; + fieldType = ExtractOptionalOrNullableType(fieldType); switch (typeEnum) { case TlcModule.DataKind.Array: From 5f2e33c3700dfd31d6e07cbeaca35e95629daba9 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 14:22:42 -0700 Subject: [PATCH 04/11] test current version of CSharpApi, refactor generatedclasses into separate class --- .../JsonUtils/JsonManifestUtils.cs | 4 +- .../Internal/Tools/CSharpApiGenerator.cs | 218 +++------ .../Internal/Tools/CSharpGeneratorUtils.cs | 463 ++++++++++++++++++ .../Internal/Tools/GeneratedClasses.cs | 101 ++++ test/Microsoft.ML.Tests/CSharpCodeGen.cs | 24 +- 5 files changed, 644 insertions(+), 166 deletions(-) create mode 100644 
src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs create mode 100644 src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs diff --git a/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs b/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs index 06f71d8d93..7950975a22 100644 --- a/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs +++ b/src/Microsoft.ML/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs @@ -68,7 +68,7 @@ public static JObject BuildAllManifests(IExceptionContext ectx, ModuleCatalog ca { var jField = new JObject(); jField[FieldNames.Name] = fieldInfo.Name; - var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.PropertyType); + var type = CSharpGeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.PropertyType); // Dive inside Var. if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Var<>)) type = type.GetGenericArguments()[0]; @@ -303,7 +303,7 @@ private static JToken BuildTypeToken(IExceptionContext ectx, FieldInfo fieldInfo jo[FieldNames.ItemType] = typeString; return jo; } - type = GeneratorUtils.ExtractOptionalOrNullableType(type); + type = CSharpGeneratorUtils.ExtractOptionalOrNullableType(type); // Dive inside Var. if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Var<>)) diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs index 9b14bde553..fa0900aa78 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -40,7 +40,7 @@ public sealed class Arguments private readonly HashSet _excludedSet; private const string RegistrationName = "CSharpApiGenerator"; private const string _defaultNamespace = "Microsoft.ML."; - private Dictionary _typesSymbolTable = new Dictionary(); + private readonly GeneratedClasses _generatedClasses; public CSharpApiGenerator(IHostEnvironment env, Arguments args, string regenerate) { @@ -55,6 +55,7 @@ public CSharpApiGenerator(IHostEnvironment env, Arguments args, string regenerat _csFilename = "CSharpApi.cs"; _regenerate = regenerate; _excludedSet = new HashSet(args.Exclude); + _generatedClasses = new GeneratedClasses(); } public void Generate(IEnumerable infos) @@ -66,17 +67,17 @@ public void Generate(IEnumerable infos) var writer = IndentingTextWriter.Wrap(sw, " "); // Generate header - GeneratorUtils.GenerateHeader(writer); + CSharpGeneratorUtils.GenerateHeader(writer); foreach (var entryPointInfo in catalog.AllEntryPoints().Where(x => !_excludedSet.Contains(x.Name)).OrderBy(x => x.Name)) { // Generate method - GenerateMethod(writer, entryPointInfo, catalog); + CSharpGeneratorUtils.GenerateMethod(writer, entryPointInfo.Name, _defaultNamespace); } // Generate footer - GeneratorUtils.GenerateFooter(writer); - GeneratorUtils.GenerateFooter(writer); + CSharpGeneratorUtils.GenerateFooter(writer); + CSharpGeneratorUtils.GenerateFooter(writer); foreach (var entryPointInfo in catalog.AllEntryPoints().Where(x => !_excludedSet.Contains(x.Name)).OrderBy(x => x.Name)) { @@ -100,17 +101,15 @@ public void Generate(IEnumerable infos) } } - GeneratorUtils.GenerateFooter(writer); - GeneratorUtils.GenerateFooter(writer); + CSharpGeneratorUtils.GenerateFooter(writer); + CSharpGeneratorUtils.GenerateFooter(writer); writer.WriteLine("#pragma warning restore"); } } - private void GenerateInputOutput(IndentingTextWriter writer, - ModuleCatalog.EntryPointInfo entryPointInfo, - 
ModuleCatalog catalog) + private void GenerateInputOutput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) { - var classAndMethod = GeneratorUtils.GetEntryPointMetadata(entryPointInfo); + var classAndMethod = CSharpGeneratorUtils.GetEntryPointMetadata(entryPointInfo); writer.WriteLine($"namespace {classAndMethod.Namespace}"); writer.WriteLine("{"); writer.Indent(); @@ -127,8 +126,8 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) continue; - var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); - if (_typesSymbolTable.ContainsKey(type.FullName)) + var type = CSharpGeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); + if (_generatedClasses.IsGenerated(type.FullName)) continue; if (!type.IsEnum) @@ -141,15 +140,16 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu var enumType = Enum.GetUnderlyingType(type); - var symbolName = GeneratorUtils.GetSymbolFromType(_typesSymbolTable, type, currentNamespace); + var apiName = _generatedClasses.GetApiName(type, currentNamespace); if (enumType == typeof(int)) - writer.WriteLine($"public enum {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}"); + writer.WriteLine($"public enum {apiName}"); else { Contracts.Assert(enumType == typeof(byte)); - writer.WriteLine($"public enum {symbolName.Substring(symbolName.LastIndexOf('.') + 1)} : byte"); + writer.WriteLine($"public enum {apiName} : byte"); } + _generatedClasses.MarkAsGenerated(type.FullName); writer.Write("{"); writer.Indent(); var names = Enum.GetNames(type); @@ -176,25 +176,7 @@ private void GenerateEnums(IndentingTextWriter writer, Type inputType, string cu } } - string GetFriendlyTypeName(string currentNameSpace, string typeName) - { - Contracts.Assert(typeName.Length >= currentNameSpace.Length); - - int index = 0; - for (index = 0; index < currentNameSpace.Length && currentNameSpace[index] == typeName[index]; index++) ; - - if (index == 0) - return typeName; - if (typeName[index - 1] == '.') - return typeName.Substring(index); - - return typeName; - } - - private void GenerateClasses(IndentingTextWriter writer, - Type inputType, - ModuleCatalog catalog, - string currentNamespace) + private void GenerateClasses(IndentingTextWriter writer, Type inputType, ModuleCatalog catalog, string currentNamespace) { foreach (var fieldInfo in inputType.GetFields()) { @@ -203,7 +185,7 @@ private void GenerateClasses(IndentingTextWriter writer, continue; var type = fieldInfo.FieldType; - type = GeneratorUtils.ExtractOptionalOrNullableType(type); + type = CSharpGeneratorUtils.ExtractOptionalOrNullableType(type); if (type.IsArray) type = type.GetElementType(); if (type == typeof(JArray) || type == typeof(JObject)) @@ -220,19 +202,21 @@ private void GenerateClasses(IndentingTextWriter writer, if (typeEnum != TlcModule.DataKind.Unknown) continue; - if (_typesSymbolTable.ContainsKey(type.FullName)) + if (_generatedClasses.IsGenerated(type.FullName)) continue; GenerateEnums(writer, type, currentNamespace); GenerateClasses(writer, type, catalog, currentNamespace); - var symbolName = GeneratorUtils.GetSymbolFromType(_typesSymbolTable, type, currentNamespace); + + var apiName = _generatedClasses.GetApiName(type, currentNamespace); string classBase = ""; if 
(type.IsSubclassOf(typeof(OneToOneColumn))) - classBase = $" : OneToOneColumn<{symbolName.Substring(symbolName.LastIndexOf('.') + 1)}>, IOneToOneColumn"; + classBase = $" : OneToOneColumn<{apiName}>, IOneToOneColumn"; else if (type.IsSubclassOf(typeof(ManyToOneColumn))) - classBase = $" : ManyToOneColumn<{symbolName.Substring(symbolName.LastIndexOf('.') + 1)}>, IManyToOneColumn"; - writer.WriteLine($"public sealed partial class {symbolName.Substring(symbolName.LastIndexOf('.') + 1)}{classBase}"); + classBase = $" : ManyToOneColumn<{apiName}>, IManyToOneColumn"; + writer.WriteLine($"public sealed partial class {apiName}{classBase}"); writer.WriteLine("{"); writer.Indent(); + _generatedClasses.MarkAsGenerated(type.FullName); GenerateInputFields(writer, type, catalog, currentNamespace); writer.Outdent(); writer.WriteLine("}"); @@ -240,67 +224,8 @@ private void GenerateClasses(IndentingTextWriter writer, } } - private void GenerateLoaderAddInputMethod(IndentingTextWriter writer, string className) - { - //Constructor. - writer.WriteLine("[JsonIgnore]"); - writer.WriteLine("private string _inputFilePath = null;"); - writer.WriteLine($"public {className}(string filePath)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("_inputFilePath = filePath;"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - - //SetInput. - writer.WriteLine($"public void SetInput(IHostEnvironment env, Experiment experiment)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("IFileHandle inputFile = new SimpleFileHandle(env, _inputFilePath, false, false);"); - writer.WriteLine("experiment.SetInput(InputFile, inputFile);"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - - //GetInputData - writer.WriteLine("public Var GetInputData() => null;"); - writer.WriteLine(""); - - //Apply. - writer.WriteLine($"public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("Contracts.Assert(previousStep == null);"); - writer.WriteLine(""); - writer.WriteLine($"return new {className}PipelineStep(experiment.Add(this));"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(""); - - //Pipelinestep class. 
- writer.WriteLine($"private class {className}PipelineStep : ILearningPipelineDataStep"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"public {className}PipelineStep (Output output)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("Data = output.Data;"); - writer.WriteLine("Model = null;"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); - writer.WriteLine("public Var Data { get; }"); - writer.WriteLine("public Var Model { get; }"); - writer.Outdent(); - writer.WriteLine("}"); - } - - private void GenerateColumnAddMethods(IndentingTextWriter writer, - Type inputType, - ModuleCatalog catalog, - string className, - out Type columnType) + private void GenerateColumnAddMethods(IndentingTextWriter writer, Type inputType, ModuleCatalog catalog, + string className, out Type columnType) { columnType = null; foreach (var fieldInfo in inputType.GetFields()) @@ -308,7 +233,7 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) continue; - var type = GeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); + var type = CSharpGeneratorUtils.ExtractOptionalOrNullableType(fieldInfo.FieldType); var isArray = type.IsArray; if (isArray) type = type.GetElementType(); @@ -321,21 +246,17 @@ private void GenerateColumnAddMethods(IndentingTextWriter writer, continue; if (type.IsSubclassOf(typeof(OneToOneColumn))) - { columnType = GenerateOneToOneColumn(writer, className, columnType, fieldInfo, inputAttr, type, isArray); - } else if (type.IsSubclassOf(typeof(ManyToOneColumn))) - { columnType = GenerateManyToOneColumn(writer, className, columnType, fieldInfo, inputAttr, type, isArray); - } } } private Type GenerateManyToOneColumn(IndentingTextWriter writer, string className, Type columnType, System.Reflection.FieldInfo fieldInfo, ArgumentAttribute inputAttr, Type type, bool isArray) { - var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); - var generatedType = _typesSymbolTable[type.FullName]; + var fieldName = CSharpGeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); + var apiName = _generatedClasses.GetApiName(type, ""); writer.WriteLine($"public {className}()"); writer.WriteLine("{"); writer.WriteLine("}"); @@ -352,12 +273,12 @@ private Type GenerateManyToOneColumn(IndentingTextWriter writer, string classNam writer.Indent(); if (isArray) { - writer.WriteLine($"var list = {fieldName} == null ? new List<{generatedType}>() : new List<{generatedType}>({fieldName});"); - writer.WriteLine($"list.Add(ManyToOneColumn<{generatedType}>.Create(name, source));"); + writer.WriteLine($"var list = {fieldName} == null ? 
new List<{apiName}>() : new List<{apiName}>({fieldName});"); + writer.WriteLine($"list.Add(ManyToOneColumn<{apiName}>.Create(name, source));"); writer.WriteLine($"{fieldName} = list.ToArray();"); } else - writer.WriteLine($"{fieldName} = ManyToOneColumn<{generatedType}>.Create(name, source);"); + writer.WriteLine($"{fieldName} = ManyToOneColumn<{apiName}>.Create(name, source);"); writer.Outdent(); writer.WriteLine("}"); writer.WriteLine(); @@ -371,8 +292,8 @@ private Type GenerateManyToOneColumn(IndentingTextWriter writer, string classNam private Type GenerateOneToOneColumn(IndentingTextWriter writer, string className, Type columnType, System.Reflection.FieldInfo fieldInfo, ArgumentAttribute inputAttr, Type type, bool isArray) { - var fieldName = GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); - var generatedType = _typesSymbolTable[type.FullName]; + var fieldName = CSharpGeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name); + var generatedType = _generatedClasses.GetApiName(type, ""); writer.WriteLine($"public {className}()"); writer.WriteLine("{"); writer.WriteLine("}"); @@ -431,7 +352,7 @@ private Type GenerateOneToOneColumn(IndentingTextWriter writer, string className if (isArray) { writer.WriteLine($"var list = {fieldName} == null ? new List<{generatedType}>() : new List<{generatedType}>({fieldName});"); - writer.WriteLine($"list.Add(OneToOneColumn<{_typesSymbolTable[type.FullName]}>.Create(name, source));"); + writer.WriteLine($"list.Add(OneToOneColumn<{generatedType}>.Create(name, source));"); writer.WriteLine($"{fieldName} = list.ToArray();"); } else @@ -448,11 +369,11 @@ private Type GenerateOneToOneColumn(IndentingTextWriter writer, string className private void GenerateInput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, ModuleCatalog catalog) { - var entryPointMetadata = GeneratorUtils.GetEntryPointMetadata(entryPointInfo); + var entryPointMetadata = CSharpGeneratorUtils.GetEntryPointMetadata(entryPointInfo); string classBase = ""; if (entryPointInfo.InputKinds != null) { - classBase += $" : {string.Join(", ", entryPointInfo.InputKinds.Select(GeneratorUtils.GetCSharpTypeName))}"; + classBase += $" : {string.Join(", ", entryPointInfo.InputKinds.Select(CSharpGeneratorUtils.GetCSharpTypeName))}"; if (entryPointInfo.InputKinds.Any(t => typeof(ITrainerInput).IsAssignableFrom(t) || typeof(ITransformInput).IsAssignableFrom(t))) classBase += ", Microsoft.ML.ILearningPipelineItem"; } @@ -460,7 +381,7 @@ private void GenerateInput(IndentingTextWriter writer, ModuleCatalog.EntryPointI GenerateEnums(writer, entryPointInfo.InputType, _defaultNamespace + entryPointMetadata.Namespace); writer.WriteLine(); GenerateClasses(writer, entryPointInfo.InputType, catalog, _defaultNamespace + entryPointMetadata.Namespace); - GeneratorUtils.GenerateSummary(writer, entryPointInfo.Description); + CSharpGeneratorUtils.GenerateSummary(writer, entryPointInfo.Description); if (entryPointInfo.ObsoleteAttribute != null) writer.WriteLine($"[Obsolete(\"{entryPointInfo.ObsoleteAttribute.Message}\")]"); @@ -470,7 +391,7 @@ private void GenerateInput(IndentingTextWriter writer, ModuleCatalog.EntryPointI writer.Indent(); writer.WriteLine(); if (entryPointInfo.InputKinds != null && entryPointInfo.InputKinds.Any(t => typeof(ILearningPipelineLoader).IsAssignableFrom(t))) - GenerateLoaderAddInputMethod(writer, entryPointMetadata.ClassName); + CSharpGeneratorUtils.GenerateLoaderAddInputMethod(writer, entryPointMetadata.ClassName); GenerateColumnAddMethods(writer, 
entryPointInfo.InputType, catalog, entryPointMetadata.ClassName, out Type transformType); writer.WriteLine(); @@ -483,8 +404,8 @@ private void GenerateInput(IndentingTextWriter writer, ModuleCatalog.EntryPointI writer.WriteLine("}"); } - private static void GenerateApplyFunction(IndentingTextWriter writer, string className, - Type type, HashSet outputVariableNames, Type[] inputKinds) + private static void GenerateApplyFunction(IndentingTextWriter writer, string className, Type type, + HashSet outputVariableNames, Type[] inputKinds) { if (inputKinds == null) return; @@ -587,18 +508,18 @@ private void GenerateInputFields(IndentingTextWriter writer, Type inputType, Mod if (fieldInfo.FieldType == typeof(JObject)) continue; - GeneratorUtils.GenerateSummary(writer, inputAttr.HelpText); + CSharpGeneratorUtils.GenerateSummary(writer, inputAttr.HelpText); if (fieldInfo.FieldType == typeof(JArray)) { - writer.WriteLine($"public Experiment {GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} {{ get; set; }}"); + writer.WriteLine($"public Experiment {CSharpGeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} {{ get; set; }}"); writer.WriteLine(); continue; } - var inputTypeString = GeneratorUtils.GetInputType(catalog, fieldInfo.FieldType, _typesSymbolTable, rootNameSpace); - if (GeneratorUtils.IsComponent(fieldInfo.FieldType)) + var inputTypeString = CSharpGeneratorUtils.GetInputType(catalog, fieldInfo.FieldType, _generatedClasses, rootNameSpace); + if (CSharpGeneratorUtils.IsComponent(fieldInfo.FieldType)) writer.WriteLine("[JsonConverter(typeof(ComponentSerializer))]"); - if (GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name) != (inputAttr.Name ?? fieldInfo.Name)) + if (CSharpGeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name) != (inputAttr.Name ?? fieldInfo.Name)) writer.WriteLine($"[JsonProperty(\"{inputAttr.Name ?? fieldInfo.Name}\")]"); // For range attributes on properties @@ -620,8 +541,8 @@ private void GenerateInputFields(IndentingTextWriter writer, Type inputType, Mod writer.WriteLine(sweepableParamAttr.ToString()); } - writer.Write($"public {inputTypeString} {GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} {{ get; set; }}"); - var defaultValue = GeneratorUtils.GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaults), _typesSymbolTable, rootNameSpace); + writer.Write($"public {inputTypeString} {CSharpGeneratorUtils.Capitalize(inputAttr.Name ?? 
fieldInfo.Name)} {{ get; set; }}"); + var defaultValue = CSharpGeneratorUtils.GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaults), _generatedClasses, rootNameSpace); if (defaultValue != null) writer.Write($" = {defaultValue};"); writer.WriteLine(); @@ -629,14 +550,12 @@ private void GenerateInputFields(IndentingTextWriter writer, Type inputType, Mod } } - private void GenerateOutput(IndentingTextWriter writer, - ModuleCatalog.EntryPointInfo entryPointInfo, - out HashSet outputVariableNames) + private void GenerateOutput(IndentingTextWriter writer, ModuleCatalog.EntryPointInfo entryPointInfo, out HashSet outputVariableNames) { outputVariableNames = new HashSet(); string classBase = ""; if (entryPointInfo.OutputKinds != null) - classBase = $" : {string.Join(", ", entryPointInfo.OutputKinds.Select(GeneratorUtils.GetCSharpTypeName))}"; + classBase = $" : {string.Join(", ", entryPointInfo.OutputKinds.Select(CSharpGeneratorUtils.GetCSharpTypeName))}"; writer.WriteLine($"public sealed class Output{classBase}"); writer.WriteLine("{"); writer.Indent(); @@ -651,10 +570,10 @@ private void GenerateOutput(IndentingTextWriter writer, if (outputAttr == null) continue; - GeneratorUtils.GenerateSummary(writer, outputAttr.Desc); - var outputTypeString = GeneratorUtils.GetOutputType(fieldInfo.FieldType); - outputVariableNames.Add(GeneratorUtils.Capitalize(outputAttr.Name ?? fieldInfo.Name)); - writer.WriteLine($"public {outputTypeString} {GeneratorUtils.Capitalize(outputAttr.Name ?? fieldInfo.Name)} {{ get; set; }} = new {outputTypeString}();"); + CSharpGeneratorUtils.GenerateSummary(writer, outputAttr.Desc); + var outputTypeString = CSharpGeneratorUtils.GetOutputType(fieldInfo.FieldType); + outputVariableNames.Add(CSharpGeneratorUtils.Capitalize(outputAttr.Name ?? fieldInfo.Name)); + writer.WriteLine($"public {outputTypeString} {CSharpGeneratorUtils.Capitalize(outputAttr.Name ?? 
fieldInfo.Name)} {{ get; set; }} = new {outputTypeString}();"); writer.WriteLine(); } @@ -662,29 +581,6 @@ private void GenerateOutput(IndentingTextWriter writer, writer.WriteLine("}"); } - private void GenerateMethod(IndentingTextWriter writer, - ModuleCatalog.EntryPointInfo entryPointInfo, - ModuleCatalog catalog) - { - var inputOuputClassName = _defaultNamespace + entryPointInfo.Name; - writer.WriteLine($"public {inputOuputClassName}.Output Add({inputOuputClassName} input)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"var output = new {inputOuputClassName}.Output();"); - writer.WriteLine("Add(input, output);"); - writer.WriteLine("return output;"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); - writer.WriteLine($"public void Add({inputOuputClassName} input, {inputOuputClassName}.Output output)"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine($"_jsonNodes.Add(Serialize(\"{entryPointInfo.Name}\", input, output));"); - writer.Outdent(); - writer.WriteLine("}"); - writer.WriteLine(); - } - private void GenerateComponentKind(IndentingTextWriter writer, string kind) { writer.WriteLine($"public abstract class {kind} : ComponentKind {{}}"); @@ -697,8 +593,8 @@ private void GenerateComponent(IndentingTextWriter writer, ModuleCatalog.Compone writer.WriteLine(); GenerateClasses(writer, component.ArgumentType, catalog, "Runtime"); writer.WriteLine(); - GeneratorUtils.GenerateSummary(writer, component.Description); - writer.WriteLine($"public sealed class {GeneratorUtils.GetComponentName(component)} : {component.Kind}"); + CSharpGeneratorUtils.GenerateSummary(writer, component.Description); + writer.WriteLine($"public sealed class {CSharpGeneratorUtils.GetComponentName(component)} : {component.Kind}"); writer.WriteLine("{"); writer.Indent(); GenerateInputFields(writer, component.ArgumentType, catalog, "Runtime"); diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs new file mode 100644 index 0000000000..aba5120197 --- /dev/null +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs @@ -0,0 +1,463 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; +using System.CodeDom; +using System.Collections.Generic; +using System.Linq; +using Microsoft.CSharp; +using Microsoft.ML.Runtime.CommandLine; +using Microsoft.ML.Runtime.EntryPoints; +using Microsoft.ML.Runtime.Internal.Utilities; +using Newtonsoft.Json.Linq; + +namespace Microsoft.ML.Runtime.Internal.Tools +{ + internal static class CSharpGeneratorUtils + { + public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointInfo) + { + return entryPointInfo.Name; + } + + public sealed class EntryPointGenerationMetadata + { + public string Namespace { get; } + public string ClassName { get; } + public EntryPointGenerationMetadata(string classNamespace, string className) + { + Namespace = classNamespace; + ClassName = className; + } + } + + public static EntryPointGenerationMetadata GetEntryPointMetadata(ModuleCatalog.EntryPointInfo entryPointInfo) + { + var split = entryPointInfo.Name.Split('.'); + Contracts.Assert(split.Length == 2); + return new EntryPointGenerationMetadata(split[0], split[1]); + } + + public static Type ExtractOptionalOrNullableType(Type type) + { + if (type.IsGenericType && (type.GetGenericTypeDefinition() == typeof(Optional<>) || type.GetGenericTypeDefinition() == typeof(Nullable<>))) + type = type.GetGenericArguments()[0]; + + return type; + } + + public static string GetCSharpTypeName(Type type) + { + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + return GetCSharpTypeName(type.GetGenericArguments()[0]) + "?"; + + using (var p = new CSharpCodeProvider()) + return p.GetTypeOutput(new CodeTypeReference(type)); + } + + public static string GetOutputType(Type outputType) + { + Contracts.Check(Var.CheckType(outputType)); + + if (outputType.IsArray) + return $"ArrayVar<{GetCSharpTypeName(outputType.GetElementType())}>"; + if (outputType.IsGenericType && outputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && outputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"DictionaryVar<{GetCSharpTypeName(outputType.GetGenericTypeArgumentsEx()[1])}>"; + } + + return $"Var<{GetCSharpTypeName(outputType)}>"; + } + + public static string GetInputType(ModuleCatalog catalog, Type inputType, GeneratedClasses generatedClasses, string rootNameSpace) + { + if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Var<>)) + return $"Var<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[0])}>"; + + if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) + return $"ArrayVar<{GetCSharpTypeName(inputType.GetElementType())}>"; + + if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"DictionaryVar<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[1])}>"; + } + + if (Var.CheckType(inputType)) + return $"Var<{GetCSharpTypeName(inputType)}>"; + + bool isNullable = false; + bool isOptional = false; + var type = inputType; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + type = type.GetGenericArguments()[0]; + isNullable = true; + } + else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) + { + type = type.GetGenericArguments()[0]; + isOptional = true; + } + + var typeEnum = TlcModule.GetDataType(type); + switch (typeEnum) + { + case TlcModule.DataKind.Float: + case TlcModule.DataKind.Int: + case TlcModule.DataKind.UInt: + case TlcModule.DataKind.Char: + case 
TlcModule.DataKind.String: + case TlcModule.DataKind.Bool: + case TlcModule.DataKind.DataView: + case TlcModule.DataKind.TransformModel: + case TlcModule.DataKind.PredictorModel: + case TlcModule.DataKind.FileHandle: + return GetCSharpTypeName(inputType); + case TlcModule.DataKind.Array: + return GetInputType(catalog, inputType.GetElementType(), generatedClasses, rootNameSpace) + "[]"; + case TlcModule.DataKind.Component: + string kind; + bool success = catalog.TryGetComponentKind(type, out kind); + Contracts.Assert(success); + return $"{kind}"; + case TlcModule.DataKind.Enum: + var enumName = generatedClasses.GetApiName(type, rootNameSpace); + if (isNullable) + return $"{enumName}?"; + if (isOptional) + return $"Optional<{enumName}>"; + return $"{enumName}"; + default: + if (isNullable) + return generatedClasses.GetApiName(type, rootNameSpace) + "?"; + if (isOptional) + return $"Optional<{generatedClasses.GetApiName(type, rootNameSpace)}>"; + return generatedClasses.GetApiName(type, rootNameSpace); + } + } + + public static bool IsComponent(Type inputType) + { + if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) + return false; + + if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return false; + } + + if (Var.CheckType(inputType)) + return false; + + var type = ExtractOptionalOrNullableType(inputType); + var typeEnum = TlcModule.GetDataType(type); + return typeEnum == TlcModule.DataKind.Component; + } + + public static string Capitalize(string s) + { + if (string.IsNullOrEmpty(s)) + return s; + return char.ToUpperInvariant(s[0]) + s.Substring(1); + } + + private static string GetCharAsString(char value) + { + switch (value) + { + case '\t': + return "\\t"; + case '\n': + return "\\n"; + case '\r': + return "\\r"; + case '\\': + return "\\"; + case '\"': + return "\""; + case '\'': + return "\\'"; + case '\0': + return "\\0"; + case '\a': + return "\\a"; + case '\b': + return "\\b"; + case '\f': + return "\\f"; + case '\v': + return "\\v"; + default: + return value.ToString(); + } + } + + public static string GetValue(ModuleCatalog catalog, Type fieldType, object fieldValue, + GeneratedClasses generatedClasses, string rootNameSpace) + { + if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Var<>)) + return $"new Var<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[0])}>()"; + + if (fieldType.IsArray && Var.CheckType(fieldType.GetElementType())) + return $"new ArrayVar<{GetCSharpTypeName(fieldType.GetElementType())}>()"; + + if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Dictionary<,>) + && fieldType.GetGenericTypeArgumentsEx()[0] == typeof(string)) + { + return $"new DictionaryVar<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[1])}>()"; + } + + if (Var.CheckType(fieldType)) + return $"new Var<{GetCSharpTypeName(fieldType)}>()"; + + if (fieldValue == null) + return null; + + if (!fieldType.IsInterface) + { + try + { + var defaultFieldValue = Activator.CreateInstance(fieldType); + if (defaultFieldValue == fieldValue) + return null; + } + catch (MissingMethodException) + { + // No parameterless constructor, ignore. 
+ } + } + + var typeEnum = TlcModule.GetDataType(fieldType); + fieldType = ExtractOptionalOrNullableType(fieldType); + switch (typeEnum) + { + case TlcModule.DataKind.Array: + var arr = fieldValue as Array; + if (arr != null && arr.GetLength(0) > 0) + return $"{{ {string.Join(", ", arr.Cast().Select(item => GetValue(catalog, fieldType.GetElementType(), item, generatedClasses, rootNameSpace)))} }}"; + return null; + case TlcModule.DataKind.String: + var strval = fieldValue as string; + if (strval != null) + return Quote(strval); + return null; + case TlcModule.DataKind.Float: + if (fieldValue is double d) + { + if (double.IsPositiveInfinity(d)) + return "double.PositiveInfinity"; + if (double.IsNegativeInfinity(d)) + return "double.NegativeInfinity"; + if (d != 0) + return d.ToString("R") + "d"; + } + else if (fieldValue is float f) + { + if (float.IsPositiveInfinity(f)) + return "float.PositiveInfinity"; + if (float.IsNegativeInfinity(f)) + return "float.NegativeInfinity"; + if (f != 0) + return f.ToString("R") + "f"; + } + return null; + case TlcModule.DataKind.Int: + if (fieldValue is int i) + { + if (i != 0) + return i.ToString(); + } + else if (fieldValue is long l) + { + if (l != 0) + return l.ToString(); + } + return null; + case TlcModule.DataKind.Bool: + return (bool)fieldValue ? "true" : "false"; + case TlcModule.DataKind.Enum: + return generatedClasses.GetApiName(fieldType, rootNameSpace) + "." + fieldValue; + case TlcModule.DataKind.Char: + return $"'{GetCharAsString((char)fieldValue)}'"; + case TlcModule.DataKind.Component: + var type = fieldValue.GetType(); + ModuleCatalog.ComponentInfo componentInfo; + if (!catalog.TryFindComponent(fieldType, type, out componentInfo)) + return null; + object defaultComponent = null; + try + { + defaultComponent = Activator.CreateInstance(componentInfo.ArgumentType); + } + catch (MissingMethodException) + { + // No parameterless constructor, ignore. + } + var propertyBag = new List(); + if (defaultComponent != null) + { + foreach (var fieldInfo in componentInfo.ArgumentType.GetFields()) + { + var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; + if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) + continue; + if (fieldInfo.FieldType == typeof(JArray) || fieldInfo.FieldType == typeof(JObject)) + continue; + + var propertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(fieldValue), generatedClasses, rootNameSpace); + var defaultPropertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaultComponent), generatedClasses, rootNameSpace); + if (propertyValue != defaultPropertyValue) + propertyBag.Add($"{Capitalize(inputAttr.Name ?? fieldInfo.Name)} = {propertyValue}"); + } + } + var properties = propertyBag.Count > 0 ? 
$" {{ {string.Join(", ", propertyBag)} }}" : ""; + return $"new {GetComponentName(componentInfo)}(){properties}"; + case TlcModule.DataKind.Unknown: + return $"new {generatedClasses.GetApiName(fieldType, rootNameSpace)}()"; + default: + return fieldValue.ToString(); + } + } + + private static string Quote(string src) + { + var dst = src.Replace("\\", @"\\").Replace("\"", "\\\"").Replace("\n", @"\n").Replace("\r", @"\r"); + return "\"" + dst + "\""; + } + + public static string GetComponentName(ModuleCatalog.ComponentInfo component) + { + return $"{Capitalize(component.Name)}{component.Kind}"; + } + + + public static void GenerateSummary(IndentingTextWriter writer, string summary) + { + if (string.IsNullOrEmpty(summary)) + return; + writer.WriteLine("/// "); + foreach (var line in summary.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)) + writer.WriteLine($"/// {line}"); + writer.WriteLine("/// "); + } + + public static void GenerateHeader(IndentingTextWriter writer) + { + writer.WriteLine("//------------------------------------------------------------------------------"); + writer.WriteLine("// "); + writer.WriteLine("// This code was generated by a tool."); + writer.WriteLine("//"); + writer.WriteLine("// Changes to this file may cause incorrect behavior and will be lost if"); + writer.WriteLine("// the code is regenerated."); + writer.WriteLine("// "); + writer.WriteLine("//------------------------------------------------------------------------------"); + //writer.WriteLine($"// This file is auto generated. To regenerate it, run: {_regenerate}"); + writer.WriteLine("#pragma warning disable"); + writer.WriteLine("using System.Collections.Generic;"); + writer.WriteLine("using Microsoft.ML.Runtime;"); + writer.WriteLine("using Microsoft.ML.Runtime.Data;"); + writer.WriteLine("using Microsoft.ML.Runtime.EntryPoints;"); + writer.WriteLine("using Newtonsoft.Json;"); + writer.WriteLine("using System;"); + writer.WriteLine("using System.Linq;"); + writer.WriteLine("using Microsoft.ML.Runtime.CommandLine;"); + writer.WriteLine(); + writer.WriteLine("namespace Microsoft.ML"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("namespace Runtime"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("public sealed partial class Experiment"); + writer.WriteLine("{"); + writer.Indent(); + } + + public static void GenerateFooter(IndentingTextWriter writer) + { + writer.Outdent(); + writer.WriteLine("}"); + } + + public static void GenerateMethod(IndentingTextWriter writer, string className, string defaultNamespace) + { + var inputOuputClassName = defaultNamespace + className; + writer.WriteLine($"public {inputOuputClassName}.Output Add({inputOuputClassName} input)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"var output = new {inputOuputClassName}.Output();"); + writer.WriteLine("Add(input, output);"); + writer.WriteLine("return output;"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); + writer.WriteLine($"public void Add({inputOuputClassName} input, {inputOuputClassName}.Output output)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"_jsonNodes.Add(Serialize(\"{className}\", input, output));"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); + } + + public static void GenerateLoaderAddInputMethod(IndentingTextWriter writer, string className) + { + //Constructor. 
+ writer.WriteLine("[JsonIgnore]"); + writer.WriteLine("private string _inputFilePath = null;"); + writer.WriteLine($"public {className}(string filePath)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("_inputFilePath = filePath;"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + + //SetInput. + writer.WriteLine($"public void SetInput(IHostEnvironment env, Experiment experiment)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("IFileHandle inputFile = new SimpleFileHandle(env, _inputFilePath, false, false);"); + writer.WriteLine("experiment.SetInput(InputFile, inputFile);"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + + //GetInputData + writer.WriteLine("public Var GetInputData() => null;"); + writer.WriteLine(""); + + //Apply. + writer.WriteLine($"public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("Contracts.Assert(previousStep == null);"); + writer.WriteLine(""); + writer.WriteLine($"return new {className}PipelineStep(experiment.Add(this));"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(""); + + //Pipelinestep class. + writer.WriteLine($"private class {className}PipelineStep : ILearningPipelineDataStep"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine($"public {className}PipelineStep (Output output)"); + writer.WriteLine("{"); + writer.Indent(); + writer.WriteLine("Data = output.Data;"); + writer.WriteLine("Model = null;"); + writer.Outdent(); + writer.WriteLine("}"); + writer.WriteLine(); + writer.WriteLine("public Var Data { get; }"); + writer.WriteLine("public Var Model { get; }"); + writer.Outdent(); + writer.WriteLine("}"); + } + } +} diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs new file mode 100644 index 0000000000..4bd690eab0 --- /dev/null +++ b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs @@ -0,0 +1,101 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.ML.Runtime.Internal.Tools +{ + internal sealed class GeneratedClasses + { + private sealed class ApiClass + { + public string OriginalName { get; set; } + public string NewName { get; set; } + public bool Generated { get; set; } + } + + private readonly Dictionary _typesSymbolTable; + public GeneratedClasses() + { + _typesSymbolTable = new Dictionary(); + } + + public string GetApiName(Type type, string rootNamespace) + { + string apiName = ""; + if (!_typesSymbolTable.TryGetValue(type.FullName, out ApiClass apiClass)) + apiName = GenerateIntenalName(type, rootNamespace); + else + apiName = apiClass.NewName; + + if (!string.IsNullOrEmpty(rootNamespace)&& apiName.StartsWith(rootNamespace)) + return apiName.Substring(rootNamespace.Length + 1); + else return apiName; + } + + private string GenerateIntenalName(Type type, string currentNamespace) + { + var fullTypeName = type.FullName; + string name = currentNamespace != "" ? currentNamespace + '.' 
: ""; + + int bracketIndex = fullTypeName.IndexOf('['); + Type[] genericTypes = null; + if (type.IsGenericType) + genericTypes = type.GetGenericArguments(); + if (bracketIndex > 0) + { + Contracts.AssertValue(genericTypes); + fullTypeName = fullTypeName.Substring(0, bracketIndex); + } + + // When the type is nested, the names of the outer types are concatenated with a '+'. + var nestedNames = fullTypeName.Split('+'); + var baseName = nestedNames[0]; + + // We currently only handle generic types in the outer most class, support for generic inner classes + // can be added if needed. + int backTickIndex = baseName.LastIndexOf('`'); + int dotIndex = baseName.LastIndexOf('.'); + Contracts.Assert(dotIndex >= 0); + if (backTickIndex < 0) + name += baseName.Substring(dotIndex + 1); + else + { + name += baseName.Substring(dotIndex + 1, backTickIndex - dotIndex - 1); + Contracts.AssertValue(genericTypes); + if (genericTypes != null) + { + foreach (var genType in genericTypes) + { + var splitNames = genType.FullName.Split('+'); + if (splitNames[0].LastIndexOf('.') >= 0) + splitNames[0] = splitNames[0].Substring(splitNames[0].LastIndexOf('.') + 1); + name += string.Join("", splitNames); + } + } + } + + for (int i = 1; i < nestedNames.Length; i++) + name += nestedNames[i]; + + Contracts.Assert(_typesSymbolTable.Values.All(apiclass => string.Compare(apiclass.NewName, name) != 0)); + _typesSymbolTable[type.FullName] = new ApiClass { OriginalName = type.FullName, Generated = false, NewName = name }; + return name; + } + + internal bool IsGenerated(string fullName) + { + if (!_typesSymbolTable.ContainsKey(fullName)) + return false; + return _typesSymbolTable[fullName].Generated; + } + + internal void MarkAsGenerated(string fullName) + { + _typesSymbolTable[fullName].Generated = true; + } + } +} diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs index d95c6c48ce..0a0eb60af2 100644 --- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs +++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.TestFramework; using System.IO; using Xunit; @@ -9,7 +10,7 @@ namespace Microsoft.ML.Tests { - public class CSharpCodeGen : BaseTestClass + public class CSharpCodeGen : BaseTestBaseline { public CSharpCodeGen(ITestOutputHelper output) : base(output) { @@ -18,8 +19,25 @@ public CSharpCodeGen(ITestOutputHelper output) : base(output) [Fact] public void GenerateCSharpAPI() { - var cSharpAPIPath = Path.Combine(RootDir, @"src\\Microsoft.ML\\CSharpApi.cs"); - Runtime.Tools.Maml.Main(new[] { $"? generator=cs{{csFilename={cSharpAPIPath}}}" }); + var dataPath = GetOutputPath("Api.cs"); + Runtime.Tools.Maml.Main(new[] { $"? 
generator=cs{{csFilename={dataPath}}}" }); + + var basePath = GetDataPath("../../src/Microsoft.ML/CSharpApi.cs"); + using (StreamReader baseline = OpenReader(basePath)) + using (StreamReader result = OpenReader(dataPath)) + { + for (; ; ) + { + string line1 = baseline.ReadLine(); + string line2 = result.ReadLine(); + + if (line1 == null && line2 == null) + break; + if (line2 != null && line2.Contains(dataPath)) + continue; + Assert.Equal(line1, line2); + } + } } } } From e673626afb3ce53e6d7f2259c3d8b96f082e7299 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 14:22:50 -0700 Subject: [PATCH 05/11] remove file --- .../Runtime/Internal/Tools/GeneratorUtils.cs | 470 ------------------ 1 file changed, 470 deletions(-) delete mode 100644 src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs deleted file mode 100644 index 3ef9885e1b..0000000000 --- a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratorUtils.cs +++ /dev/null @@ -1,470 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using System; -using System.CodeDom; -using System.Collections.Generic; -using System.Linq; -using Microsoft.CSharp; -using Microsoft.ML.Runtime.CommandLine; -using Microsoft.ML.Runtime.EntryPoints; -using Microsoft.ML.Runtime.Internal.Utilities; -using Newtonsoft.Json.Linq; - -namespace Microsoft.ML.Runtime.Internal.Tools -{ - internal static class GeneratorUtils - { - public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointInfo) - { - return entryPointInfo.Name; - } - - public sealed class EntryPointGenerationMetadata - { - public string Namespace { get; } - public string ClassName { get; } - public EntryPointGenerationMetadata(string @namespace, string className) - { - Namespace = @namespace; - ClassName = className; - } - } - - public static EntryPointGenerationMetadata GetEntryPointMetadata(ModuleCatalog.EntryPointInfo entryPointInfo) - { - var split = entryPointInfo.Name.Split('.'); - Contracts.Assert(split.Length == 2); - return new EntryPointGenerationMetadata(split[0], split[1]); - } - - public static Type ExtractOptionalOrNullableType(Type type) - { - if (type.IsGenericType && (type.GetGenericTypeDefinition() == typeof(Optional<>) || type.GetGenericTypeDefinition() == typeof(Nullable<>))) - type = type.GetGenericArguments()[0]; - - return type; - } - - public static string GetCSharpTypeName(Type type) - { - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - return GetCSharpTypeName(type.GetGenericArguments()[0]) + "?"; - - using (var p = new CSharpCodeProvider()) - return p.GetTypeOutput(new CodeTypeReference(type)); - } - - public static string GetOutputType(Type outputType) - { - Contracts.Check(Var.CheckType(outputType)); - - if (outputType.IsArray) - return $"ArrayVar<{GetCSharpTypeName(outputType.GetElementType())}>"; - if (outputType.IsGenericType && outputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && outputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"DictionaryVar<{GetCSharpTypeName(outputType.GetGenericTypeArgumentsEx()[1])}>"; - } - - return $"Var<{GetCSharpTypeName(outputType)}>"; - } - - public static string GetInputType(ModuleCatalog catalog, Type inputType, Dictionary typesSymbolTable, 
string rootNameSpace) - { - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Var<>)) - return $"Var<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[0])}>"; - - if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) - return $"ArrayVar<{GetCSharpTypeName(inputType.GetElementType())}>"; - - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"DictionaryVar<{GetCSharpTypeName(inputType.GetGenericTypeArgumentsEx()[1])}>"; - } - - if (Var.CheckType(inputType)) - return $"Var<{GetCSharpTypeName(inputType)}>"; - - bool isNullable = false; - bool isOptional = false; - var type = inputType; - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - type = type.GetGenericArguments()[0]; - isNullable = true; - } - else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>)) - { - type = type.GetGenericArguments()[0]; - isOptional = true; - } - - var typeEnum = TlcModule.GetDataType(type); - switch (typeEnum) - { - case TlcModule.DataKind.Float: - case TlcModule.DataKind.Int: - case TlcModule.DataKind.UInt: - case TlcModule.DataKind.Char: - case TlcModule.DataKind.String: - case TlcModule.DataKind.Bool: - case TlcModule.DataKind.DataView: - case TlcModule.DataKind.TransformModel: - case TlcModule.DataKind.PredictorModel: - case TlcModule.DataKind.FileHandle: - return GetCSharpTypeName(inputType); - case TlcModule.DataKind.Array: - return GetInputType(catalog, inputType.GetElementType(), typesSymbolTable, rootNameSpace) + "[]"; - case TlcModule.DataKind.Component: - string kind; - bool success = catalog.TryGetComponentKind(type, out kind); - Contracts.Assert(success); - return $"{kind}"; - case TlcModule.DataKind.Enum: - var enumName = GetEnumName(type, typesSymbolTable, rootNameSpace); - if (isNullable) - return $"{enumName}?"; - if (isOptional) - return $"Optional<{enumName}>"; - return $"{enumName}"; - default: - if (isNullable) - return GetEnumName(type, typesSymbolTable, rootNameSpace); ; - if (isOptional) - return $"Optional<{GetEnumName(type, typesSymbolTable, rootNameSpace)}>"; - if (typesSymbolTable.ContainsKey(type.FullName)) - return GetEnumName(type, typesSymbolTable, rootNameSpace); - else - return GetEnumName(type, typesSymbolTable, rootNameSpace); ; - } - } - - public static bool IsComponent(Type inputType) - { - if (inputType.IsArray && Var.CheckType(inputType.GetElementType())) - return false; - - if (inputType.IsGenericType && inputType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && inputType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return false; - } - - if (Var.CheckType(inputType)) - return false; - - var type = ExtractOptionalOrNullableType(inputType); - var typeEnum = TlcModule.GetDataType(type); - return typeEnum == TlcModule.DataKind.Component; - } - - public static string Capitalize(string s) - { - if (string.IsNullOrEmpty(s)) - return s; - return char.ToUpperInvariant(s[0]) + s.Substring(1); - } - - private static string GetCharAsString(char value) - { - switch (value) - { - case '\t': - return "\\t"; - case '\n': - return "\\n"; - case '\r': - return "\\r"; - case '\\': - return "\\"; - case '\"': - return "\""; - case '\'': - return "\\'"; - case '\0': - return "\\0"; - case '\a': - return "\\a"; - case '\b': - return "\\b"; - case '\f': - return "\\f"; - case '\v': - return "\\v"; - default: - return value.ToString(); - } - } - 
- public static string GetValue(ModuleCatalog catalog, Type fieldType, object fieldValue, - Dictionary typesSymbolTable, string rootNameSpace = "") - { - if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Var<>)) - return $"new Var<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[0])}>()"; - - if (fieldType.IsArray && Var.CheckType(fieldType.GetElementType())) - return $"new ArrayVar<{GetCSharpTypeName(fieldType.GetElementType())}>()"; - - if (fieldType.IsGenericType && fieldType.GetGenericTypeDefinition() == typeof(Dictionary<,>) - && fieldType.GetGenericTypeArgumentsEx()[0] == typeof(string)) - { - return $"new DictionaryVar<{GetCSharpTypeName(fieldType.GetGenericTypeArgumentsEx()[1])}>()"; - } - - if (Var.CheckType(fieldType)) - return $"new Var<{GetCSharpTypeName(fieldType)}>()"; - - if (fieldValue == null) - return null; - - if (!fieldType.IsInterface) - { - try - { - var defaultFieldValue = Activator.CreateInstance(fieldType); - if (defaultFieldValue == fieldValue) - return null; - } - catch (MissingMethodException) - { - // No parameterless constructor, ignore. - } - } - - var typeEnum = TlcModule.GetDataType(fieldType); - fieldType = ExtractOptionalOrNullableType(fieldType); - switch (typeEnum) - { - case TlcModule.DataKind.Array: - var arr = fieldValue as Array; - if (arr != null && arr.GetLength(0) > 0) - return $"{{ {string.Join(", ", arr.Cast().Select(item => GetValue(catalog, fieldType.GetElementType(), item, typesSymbolTable)))} }}"; - return null; - case TlcModule.DataKind.String: - var strval = fieldValue as string; - if (strval != null) - return Quote(strval); - return null; - case TlcModule.DataKind.Float: - if (fieldValue is double d) - { - if (double.IsPositiveInfinity(d)) - return "double.PositiveInfinity"; - if (double.IsNegativeInfinity(d)) - return "double.NegativeInfinity"; - if (d != 0) - return d.ToString("R") + "d"; - } - else if (fieldValue is float f) - { - if (float.IsPositiveInfinity(f)) - return "float.PositiveInfinity"; - if (float.IsNegativeInfinity(f)) - return "float.NegativeInfinity"; - if (f != 0) - return f.ToString("R") + "f"; - } - return null; - case TlcModule.DataKind.Int: - if (fieldValue is int i) - { - if (i != 0) - return i.ToString(); - } - else if (fieldValue is long l) - { - if (l != 0) - return l.ToString(); - } - return null; - case TlcModule.DataKind.Bool: - return (bool)fieldValue ? "true" : "false"; - case TlcModule.DataKind.Enum: - return GetEnumName(fieldType, typesSymbolTable, rootNameSpace) + "." + fieldValue; - case TlcModule.DataKind.Char: - return $"'{GetCharAsString((char)fieldValue)}'"; - case TlcModule.DataKind.Component: - var type = fieldValue.GetType(); - ModuleCatalog.ComponentInfo componentInfo; - if (!catalog.TryFindComponent(fieldType, type, out componentInfo)) - return null; - object defaultComponent = null; - try - { - defaultComponent = Activator.CreateInstance(componentInfo.ArgumentType); - } - catch (MissingMethodException) - { - // No parameterless constructor, ignore. 
- } - var propertyBag = new List(); - if (defaultComponent != null) - { - foreach (var fieldInfo in componentInfo.ArgumentType.GetFields()) - { - var inputAttr = fieldInfo.GetCustomAttributes(typeof(ArgumentAttribute), false).FirstOrDefault() as ArgumentAttribute; - if (inputAttr == null || inputAttr.Visibility == ArgumentAttribute.VisibilityType.CmdLineOnly) - continue; - if (fieldInfo.FieldType == typeof(JArray) || fieldInfo.FieldType == typeof(JObject)) - continue; - - var propertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(fieldValue), typesSymbolTable); - var defaultPropertyValue = GetValue(catalog, fieldInfo.FieldType, fieldInfo.GetValue(defaultComponent), typesSymbolTable); - if (propertyValue != defaultPropertyValue) - propertyBag.Add($"{GeneratorUtils.Capitalize(inputAttr.Name ?? fieldInfo.Name)} = {propertyValue}"); - } - } - var properties = propertyBag.Count > 0 ? $" {{ {string.Join(", ", propertyBag)} }}" : ""; - return $"new {GetComponentName(componentInfo)}(){properties}"; - case TlcModule.DataKind.Unknown: - return $"new {GetEnumName(fieldType, typesSymbolTable, rootNameSpace)}()"; - default: - return fieldValue.ToString(); - } - } - - private static string Quote(string src) - { - var dst = src.Replace("\\", @"\\").Replace("\"", "\\\"").Replace("\n", @"\n").Replace("\r", @"\r"); - return "\"" + dst + "\""; - } - - public static string GetComponentName(ModuleCatalog.ComponentInfo component) - { - return $"{Capitalize(component.Name)}{component.Kind}"; - } - - public static string GetEnumName(Type type, Dictionary typesSymbolTable, string rootNamespace) - { - if (!typesSymbolTable.TryGetValue(type.FullName, out string fullname)) - fullname = GetSymbolFromType(typesSymbolTable, type, rootNamespace); - if (fullname.StartsWith(rootNamespace)) - return fullname.Substring(rootNamespace.Length + 1); - else return fullname; - } - - /// - /// This methods creates a unique name for a class/struct/enum, given a type and a namespace. - /// It generates the name based on the property of the type - /// (see description here https://msdn.microsoft.com/en-us/library/system.type.fullname(v=vs.110).aspx). - /// Example: Assume we have the following structure in namespace X.Y: - /// class A { - /// class B { - /// enum C { - /// Value1, - /// Value2 - /// } - /// } - /// } - /// The full name of C would be X.Y.A+B+C. This method will generate the name "ABC" from it. In case - /// A is generic with one generic type, then the full name of typeof(A<float>.B.C) would be X.Y.A`1+B+C[[System.Single]]. - /// In this case, this method will generate the name "ASingleBC". - /// - /// A dictionary containing the names of the classes already generated. - /// This parameter is only used to ensure that the newly generated name is unique. - /// The type for which to generate the new name. - /// The namespace prefix to the new name. - /// A unique name derived from the given type and namespace. - public static string GetSymbolFromType(Dictionary typesSymbolTable, Type type, string currentNamespace) - { - var fullTypeName = type.FullName; - string name = currentNamespace != "" ? currentNamespace + '.' : ""; - - int bracketIndex = fullTypeName.IndexOf('['); - Type[] genericTypes = null; - if (type.IsGenericType) - genericTypes = type.GetGenericArguments(); - if (bracketIndex > 0) - { - Contracts.AssertValue(genericTypes); - fullTypeName = fullTypeName.Substring(0, bracketIndex); - } - - // When the type is nested, the names of the outer types are concatenated with a '+'. 
- var nestedNames = fullTypeName.Split('+'); - var baseName = nestedNames[0]; - - // We currently only handle generic types in the outer most class, support for generic inner classes - // can be added if needed. - int backTickIndex = baseName.LastIndexOf('`'); - int dotIndex = baseName.LastIndexOf('.'); - Contracts.Assert(dotIndex >= 0); - if (backTickIndex < 0) - name += baseName.Substring(dotIndex + 1); - else - { - name += baseName.Substring(dotIndex + 1, backTickIndex - dotIndex - 1); - Contracts.AssertValue(genericTypes); - if (genericTypes != null) - { - foreach (var genType in genericTypes) - { - var splitNames = genType.FullName.Split('+'); - if (splitNames[0].LastIndexOf('.') >= 0) - splitNames[0] = splitNames[0].Substring(splitNames[0].LastIndexOf('.') + 1); - name += string.Join("", splitNames); - } - } - } - - for (int i = 1; i < nestedNames.Length; i++) - name += nestedNames[i]; - - Contracts.Assert(typesSymbolTable.Select(kvp => kvp.Value).All(str => string.Compare(str, name) != 0)); - typesSymbolTable[type.FullName] = name; - return name; - } - - public static void GenerateSummary(IndentingTextWriter writer, string summary) - { - if (string.IsNullOrEmpty(summary)) - return; - writer.WriteLine("/// "); - foreach (var line in summary.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)) - writer.WriteLine($"/// {line}"); - writer.WriteLine("/// "); - } - - public static void GenerateHeader(IndentingTextWriter writer) - { - writer.WriteLine("//------------------------------------------------------------------------------"); - writer.WriteLine("// "); - writer.WriteLine("// This code was generated by a tool."); - writer.WriteLine("//"); - writer.WriteLine("// Changes to this file may cause incorrect behavior and will be lost if"); - writer.WriteLine("// the code is regenerated."); - writer.WriteLine("// "); - writer.WriteLine("//------------------------------------------------------------------------------"); - //writer.WriteLine($"// This file is auto generated. 
To regenerate it, run: {_regenerate}"); - writer.WriteLine("#pragma warning disable"); - writer.WriteLine("using System.Collections.Generic;"); - writer.WriteLine("using Microsoft.ML.Runtime;"); - writer.WriteLine("using Microsoft.ML.Runtime.Data;"); - writer.WriteLine("using Microsoft.ML.Runtime.EntryPoints;"); - writer.WriteLine("using Newtonsoft.Json;"); - writer.WriteLine("using System;"); - writer.WriteLine("using System.Linq;"); - writer.WriteLine("using Microsoft.ML.Runtime.CommandLine;"); - writer.WriteLine(); - writer.WriteLine("namespace Microsoft.ML"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("namespace Runtime"); - writer.WriteLine("{"); - writer.Indent(); - writer.WriteLine("public sealed partial class Experiment"); - writer.WriteLine("{"); - writer.Indent(); - } - - public static void GenerateFooter(IndentingTextWriter writer) - { - writer.Outdent(); - writer.WriteLine("}"); - } - - } -} From 1d92bd3fa06ec2cb5e9c019c06af7cdef3a8224c Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 14:34:22 -0700 Subject: [PATCH 06/11] small cleanup --- .../Runtime/Internal/Tools/CSharpApiGenerator.cs | 2 -- .../Runtime/Internal/Tools/CSharpGeneratorUtils.cs | 7 ------- .../Runtime/Internal/Tools/GeneratedClasses.cs | 1 + test/Microsoft.ML.Tests/CSharpCodeGen.cs | 2 +- 4 files changed, 2 insertions(+), 10 deletions(-) diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs index fa0900aa78..b1f344873d 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -3,11 +3,9 @@ // See the LICENSE file in the project root for more information. using System; -using System.CodeDom; using System.Collections.Generic; using System.IO; using System.Linq; -using Microsoft.CSharp; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs index aba5120197..8205963e28 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs @@ -16,11 +16,6 @@ namespace Microsoft.ML.Runtime.Internal.Tools { internal static class CSharpGeneratorUtils { - public static string GetFullMethodName(ModuleCatalog.EntryPointInfo entryPointInfo) - { - return entryPointInfo.Name; - } - public sealed class EntryPointGenerationMetadata { public string Namespace { get; } @@ -334,7 +329,6 @@ public static string GetComponentName(ModuleCatalog.ComponentInfo component) return $"{Capitalize(component.Name)}{component.Kind}"; } - public static void GenerateSummary(IndentingTextWriter writer, string summary) { if (string.IsNullOrEmpty(summary)) @@ -355,7 +349,6 @@ public static void GenerateHeader(IndentingTextWriter writer) writer.WriteLine("// the code is regenerated."); writer.WriteLine("// "); writer.WriteLine("//------------------------------------------------------------------------------"); - //writer.WriteLine($"// This file is auto generated. 
To regenerate it, run: {_regenerate}"); writer.WriteLine("#pragma warning disable"); writer.WriteLine("using System.Collections.Generic;"); writer.WriteLine("using Microsoft.ML.Runtime;"); diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs index 4bd690eab0..fe8adf35fc 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/GeneratedClasses.cs @@ -18,6 +18,7 @@ private sealed class ApiClass } private readonly Dictionary _typesSymbolTable; + public GeneratedClasses() { _typesSymbolTable = new Dictionary(); diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs index 0a0eb60af2..a379f42708 100644 --- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs +++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs @@ -17,7 +17,7 @@ public CSharpCodeGen(ITestOutputHelper output) : base(output) } [Fact] - public void GenerateCSharpAPI() + public void TestGeneratedCSharpAPI() { var dataPath = GetOutputPath("Api.cs"); Runtime.Tools.Maml.Main(new[] { $"? generator=cs{{csFilename={dataPath}}}" }); From 4258843d84ae9e3416d43d43d17e2a770c4173e1 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 15:11:54 -0700 Subject: [PATCH 07/11] regenerate csharpapi.cs --- src/Microsoft.ML/CSharpApi.cs | 363 +++++++++++++++++----------------- 1 file changed, 177 insertions(+), 186 deletions(-) diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs index c103a96475..6009c5a1ae 100644 --- a/src/Microsoft.ML/CSharpApi.cs +++ b/src/Microsoft.ML/CSharpApi.cs @@ -1461,57 +1461,84 @@ public sealed class Output namespace Data { - public sealed partial class TextLoaderArguments + public enum DataKind : byte { - /// - /// Use separate parsing threads? - /// - public bool UseThreads { get; set; } = true; + I1 = 1, + U1 = 2, + I2 = 3, + U2 = 4, + I4 = 5, + U4 = 6, + I8 = 7, + U8 = 8, + R4 = 9, + Num = 9, + R8 = 10, + TX = 11, + Text = 11, + TXT = 11, + BL = 12, + Bool = 12, + TimeSpan = 13, + TS = 13, + DT = 14, + DateTime = 14, + DZ = 15, + DateTimeZone = 15, + UG = 16, + U16 = 16 + } + public sealed partial class TextLoaderRange + { /// - /// File containing a header with feature names. If specified, header defined in the data file (header+) is ignored. + /// First index in the range /// - public string HeaderFile { get; set; } + public int Min { get; set; } /// - /// Maximum number of rows to produce + /// Last index in the range /// - public long? MaxRows { get; set; } + public int? Max { get; set; } /// - /// Whether the input may include quoted values, which can contain separator characters, colons, and distinguish empty values from missing values. When true, consecutive separators denote a missing value and an empty value is denoted by "". When false, consecutive separators denote an empty value. + /// This range extends to the end of the line, but should be a fixed number of items /// - public bool AllowQuoting { get; set; } = true; + public bool AutoEnd { get; set; } = false; /// - /// Whether the input may include sparse representations + /// This range extends to the end of the line, which can vary from line to line /// - public bool AllowSparse { get; set; } = true; + public bool VariableEnd { get; set; } = false; /// - /// Number of source columns in the text data. Default is that sparse rows contain their size information. + /// This range includes only other indices not specified /// - public int? 
InputSize { get; set; } + public bool AllOther { get; set; } = false; /// - /// Source column separator. + /// Force scalar columns to be treated as vectors of length one /// - public char[] Separator { get; set; } = { '\t' }; + public bool ForceVector { get; set; } = false; + } + + public sealed partial class KeyRange + { /// - /// Column groups. Each group is specified as name:type:numeric-ranges, eg, col=Features:R4:1-17,26,35-40 + /// First index in the range /// - public TextLoaderColumn[] Column { get; set; } + public ulong Min { get; set; } = 0; /// - /// Remove trailing whitespace from lines + /// Last index in the range /// - public bool TrimWhitespace { get; set; } = false; + public ulong? Max { get; set; } /// - /// Data file has header with feature names. Header is read only if options 'hs' and 'hf' are not specified. + /// Whether the key is contiguous /// - public bool HasHeader { get; set; } = false; + public bool Contiguous { get; set; } = true; } @@ -1539,56 +1566,57 @@ public sealed partial class TextLoaderColumn } - public sealed partial class TextLoaderRange + public sealed partial class TextLoaderArguments { /// - /// First index in the range + /// Use separate parsing threads? /// - public int Min { get; set; } + public bool UseThreads { get; set; } = true; /// - /// Last index in the range + /// File containing a header with feature names. If specified, header defined in the data file (header+) is ignored. /// - public int? Max { get; set; } + public string HeaderFile { get; set; } /// - /// This range extends to the end of the line, but should be a fixed number of items + /// Maximum number of rows to produce /// - public bool AutoEnd { get; set; } = false; + public long? MaxRows { get; set; } /// - /// This range extends to the end of the line, which can vary from line to line + /// Whether the input may include quoted values, which can contain separator characters, colons, and distinguish empty values from missing values. When true, consecutive separators denote a missing value and an empty value is denoted by "". When false, consecutive separators denote an empty value. /// - public bool VariableEnd { get; set; } = false; + public bool AllowQuoting { get; set; } = true; /// - /// This range includes only other indices not specified + /// Whether the input may include sparse representations /// - public bool AllOther { get; set; } = false; + public bool AllowSparse { get; set; } = true; /// - /// Force scalar columns to be treated as vectors of length one + /// Number of source columns in the text data. Default is that sparse rows contain their size information. /// - public bool ForceVector { get; set; } = false; + public int? InputSize { get; set; } - } + /// + /// Source column separator. + /// + public char[] Separator { get; set; } = { '\t' }; - public sealed partial class KeyRange - { /// - /// First index in the range + /// Column groups. Each group is specified as name:type:numeric-ranges, eg, col=Features:R4:1-17,26,35-40 /// - public ulong Min { get; set; } = 0; + public TextLoaderColumn[] Column { get; set; } /// - /// Last index in the range + /// Remove trailing whitespace from lines /// - public ulong? Max { get; set; } + public bool TrimWhitespace { get; set; } = false; /// - /// Whether the key is contiguous + /// Data file has header with feature names. Header is read only if options 'hs' and 'hf' are not specified. 
/// - public bool Contiguous { get; set; } = true; + public bool HasHeader { get; set; } = false; } @@ -1640,7 +1668,7 @@ public TextLoaderPipelineStep (Output output) /// /// Arguments /// - public Microsoft.ML.Data.TextLoaderArguments Arguments { get; set; } = new Microsoft.ML.Data.TextLoaderArguments(); + public TextLoaderArguments Arguments { get; set; } = new TextLoaderArguments(); public sealed class Output @@ -1906,12 +1934,12 @@ public sealed partial class BinaryCrossValidator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphInput(); + public CrossValidationBinaryMacroSubGraphInput Inputs { get; set; } = new CrossValidationBinaryMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.CrossValidationBinaryMacroSubGraphOutput(); + public CrossValidationBinaryMacroSubGraphOutput Outputs { get; set; } = new CrossValidationBinaryMacroSubGraphOutput(); /// /// Column to use for stratification @@ -2178,7 +2206,7 @@ public sealed partial class CrossValidationResultsCombiner /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds Kind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; public sealed class Output @@ -2258,12 +2286,12 @@ public sealed partial class CrossValidator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.CrossValidationMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.CrossValidationMacroSubGraphInput(); + public CrossValidationMacroSubGraphInput Inputs { get; set; } = new CrossValidationMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.CrossValidationMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.CrossValidationMacroSubGraphOutput(); + public CrossValidationMacroSubGraphOutput Outputs { get; set; } = new CrossValidationMacroSubGraphOutput(); /// /// Column to use for stratification @@ -2278,7 +2306,7 @@ public sealed partial class CrossValidator /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds Kind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; /// /// Column to use for labels @@ -2687,7 +2715,7 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// The training subgraph output. 
/// - public Microsoft.ML.Models.OneVersusAllMacroSubGraphOutput OutputForSubGraph { get; set; } = new Microsoft.ML.Models.OneVersusAllMacroSubGraphOutput(); + public OneVersusAllMacroSubGraphOutput OutputForSubGraph { get; set; } = new OneVersusAllMacroSubGraphOutput(); /// /// Use probabilities in OVA combiner @@ -2717,12 +2745,12 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public NormalizeOption NormalizeFeatures { get; set; } = NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public CachingOptions Caching { get; set; } = CachingOptions.Auto; public sealed class Output @@ -2862,12 +2890,12 @@ public sealed partial class OvaModelCombiner : Microsoft.ML.Runtime.EntryPoints. /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public NormalizeOption NormalizeFeatures { get; set; } = NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public CachingOptions Caching { get; set; } = CachingOptions.Auto; public sealed class Output @@ -3421,12 +3449,12 @@ public sealed partial class TrainTestBinaryEvaluator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.TrainTestBinaryMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.TrainTestBinaryMacroSubGraphInput(); + public TrainTestBinaryMacroSubGraphInput Inputs { get; set; } = new TrainTestBinaryMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.TrainTestBinaryMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.TrainTestBinaryMacroSubGraphOutput(); + public TrainTestBinaryMacroSubGraphOutput Outputs { get; set; } = new TrainTestBinaryMacroSubGraphOutput(); public sealed class Output @@ -3516,17 +3544,17 @@ public sealed partial class TrainTestEvaluator /// /// The training subgraph inputs /// - public Microsoft.ML.Models.TrainTestMacroSubGraphInput Inputs { get; set; } = new Microsoft.ML.Models.TrainTestMacroSubGraphInput(); + public TrainTestMacroSubGraphInput Inputs { get; set; } = new TrainTestMacroSubGraphInput(); /// /// The training subgraph outputs /// - public Microsoft.ML.Models.TrainTestMacroSubGraphOutput Outputs { get; set; } = new Microsoft.ML.Models.TrainTestMacroSubGraphOutput(); + public TrainTestMacroSubGraphOutput Outputs { get; set; } = new TrainTestMacroSubGraphOutput(); /// /// Specifies the trainer kind, which determines the evaluator to be used. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds Kind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public MacroUtilsTrainerKinds Kind { get; set; } = MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; /// /// Identifies which pipeline was run for this train test. @@ -3888,7 +3916,7 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// Bundle low population bins. 
Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -4170,7 +4198,7 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -4403,7 +4431,7 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -4568,7 +4596,7 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -4829,7 +4857,7 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -4994,7 +5022,7 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -5215,7 +5243,7 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -5380,7 +5408,7 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -5606,7 +5634,7 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -5771,7 +5799,7 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Bundle Bundling { get; set; } = Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -6283,7 +6311,7 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// Cluster initialization algorithm /// - public Microsoft.ML.Trainers.KMeansPlusPlusTrainerInitAlgorithm InitAlgorithm { get; set; } = Microsoft.ML.Trainers.KMeansPlusPlusTrainerInitAlgorithm.KMeansParallel; + public KMeansPlusPlusTrainerInitAlgorithm InitAlgorithm { get; set; } = KMeansPlusPlusTrainerInitAlgorithm.KMeansParallel; /// /// Tolerance parameter for trainer convergence. Lower = slower, more accurate @@ -8029,7 +8057,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformBinColumn[] Column { get; set; } + public NormalizeTransformBinColumn[] Column { get; set; } /// /// Max number of bins, power of 2 recommended @@ -8132,7 +8160,7 @@ public sealed partial class CategoricalHashTransformColumn : OneToOneColumn /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind? 
OutputKind { get; set; } + public CategoricalTransformOutputKind? OutputKind { get; set; } /// /// Name of the new column @@ -8196,7 +8224,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:hashBits:src) /// - public Microsoft.ML.Transforms.CategoricalHashTransformColumn[] Column { get; set; } + public CategoricalHashTransformColumn[] Column { get; set; } /// /// Number of bits to hash into. Must be between 1 and 30, inclusive. @@ -8221,7 +8249,7 @@ public void AddColumn(string name, string source) /// /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind OutputKind { get; set; } = Microsoft.ML.Transforms.CategoricalTransformOutputKind.Bag; + public CategoricalTransformOutputKind OutputKind { get; set; } = CategoricalTransformOutputKind.Bag; /// /// Input dataset @@ -8287,7 +8315,7 @@ public sealed partial class CategoricalTransformColumn : OneToOneColumn /// Output kind: Bag (multi-set vector), Ind (indicator vector), Key (index), or Binary encoded indicator vector /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind? OutputKind { get; set; } + public CategoricalTransformOutputKind? OutputKind { get; set; } /// /// Maximum number of terms to keep when auto-training @@ -8302,7 +8330,7 @@ public sealed partial class CategoricalTransformColumn : OneToOneColumn /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder? Sort { get; set; } + public TermTransformSortOrder? Sort { get; set; } /// /// Whether key value metadata should be text, regardless of the actual input type @@ -8371,12 +8399,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CategoricalTransformColumn[] Column { get; set; } + public CategoricalTransformColumn[] Column { get; set; } /// /// Output kind: Bag (multi-set vector), Ind (indicator vector), or Key (index) /// - public Microsoft.ML.Transforms.CategoricalTransformOutputKind OutputKind { get; set; } = Microsoft.ML.Transforms.CategoricalTransformOutputKind.Ind; + public CategoricalTransformOutputKind OutputKind { get; set; } = CategoricalTransformOutputKind.Ind; /// /// Maximum number of terms to keep per column when auto-training @@ -8391,7 +8419,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). 
/// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -8515,7 +8543,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CharTokenizeTransformColumn[] Column { get; set; } + public CharTokenizeTransformColumn[] Column { get; set; } /// /// Whether to mark the beginning/end of each row/slot with start of text character (0x02)/end of text character (0x03) @@ -8615,7 +8643,7 @@ public void AddColumn(string name, params string[] source) /// /// New column definition(s) (optional form: name:srcs) /// - public Microsoft.ML.Transforms.ConcatTransformColumn[] Column { get; set; } + public ConcatTransformColumn[] Column { get; set; } /// /// Input dataset @@ -8734,7 +8762,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.CopyColumnsTransformColumn[] Column { get; set; } + public CopyColumnsTransformColumn[] Column { get; set; } /// /// Input dataset @@ -8918,41 +8946,13 @@ public ColumnSelectorPipelineStep(Output output) namespace Transforms { - public enum DataKind : byte - { - I1 = 1, - U1 = 2, - I2 = 3, - U2 = 4, - I4 = 5, - U4 = 6, - I8 = 7, - U8 = 8, - R4 = 9, - Num = 9, - R8 = 10, - TX = 11, - Text = 11, - TXT = 11, - BL = 12, - Bool = 12, - TimeSpan = 13, - TS = 13, - DT = 14, - DateTime = 14, - DZ = 15, - DateTimeZone = 15, - UG = 16, - U16 = 16 - } - public sealed partial class ConvertTransformColumn : OneToOneColumn, IOneToOneColumn { /// /// The result type /// - public Microsoft.ML.Transforms.DataKind? ResultType { get; set; } + public Microsoft.ML.Data.DataKind? ResultType { get; set; } /// /// For a key column, this defines the range of values @@ -9021,12 +9021,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:type:src) /// - public Microsoft.ML.Transforms.ConvertTransformColumn[] Column { get; set; } + public ConvertTransformColumn[] Column { get; set; } /// /// The result type /// - public Microsoft.ML.Transforms.DataKind? ResultType { get; set; } + public Microsoft.ML.Data.DataKind? ResultType { get; set; } /// /// For a key column, this defines the range of values @@ -9230,7 +9230,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -9311,7 +9311,7 @@ public sealed partial class DataCache : Microsoft.ML.Runtime.EntryPoints.CommonI /// /// Caching strategy /// - public Microsoft.ML.Transforms.CacheCachingType Caching { get; set; } = Microsoft.ML.Transforms.CacheCachingType.Memory; + public CacheCachingType Caching { get; set; } = CacheCachingType.Memory; /// /// Input dataset @@ -9454,7 +9454,7 @@ public sealed partial class TermTransformColumn : OneToOneColumn /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). 
/// - public Microsoft.ML.Transforms.TermTransformSortOrder? Sort { get; set; } + public TermTransformSortOrder? Sort { get; set; } /// /// Whether key value metadata should be text, regardless of the actual input type @@ -9523,7 +9523,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.TermTransformColumn[] Column { get; set; } + public TermTransformColumn[] Column { get; set; } /// /// Maximum number of terms to keep per column when auto-training @@ -9538,7 +9538,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -9892,7 +9892,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformGcnColumn[] Column { get; set; } + public LpNormNormalizerTransformGcnColumn[] Column { get; set; } /// /// Subtract mean from each value before normalizing @@ -10051,7 +10051,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.HashJoinTransformColumn[] Column { get; set; } + public HashJoinTransformColumn[] Column { get; set; } /// /// Whether the values need to be combined for a single hash @@ -10190,7 +10190,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.KeyToValueTransformColumn[] Column { get; set; } + public KeyToValueTransformColumn[] Column { get; set; } /// /// Input dataset @@ -10384,7 +10384,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LabelIndicatorTransformColumn[] Column { get; set; } + public LabelIndicatorTransformColumn[] Column { get; set; } /// /// Label of the positive class. @@ -10583,7 +10583,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformLogNormalColumn[] Column { get; set; } + public NormalizeTransformLogNormalColumn[] Column { get; set; } /// /// Max number of examples used to train the normalizer @@ -10656,7 +10656,7 @@ public sealed partial class LpNormNormalizerTransformColumn : OneToOneColumn /// The norm to use to normalize each sample /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind? NormKind { get; set; } + public LpNormNormalizerTransformNormalizerKind? 
NormKind { get; set; } /// /// Subtract mean from each value before normalizing @@ -10725,12 +10725,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformColumn[] Column { get; set; } + public LpNormNormalizerTransformColumn[] Column { get; set; } /// /// The norm to use to normalize each sample /// - public Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind NormKind { get; set; } = Microsoft.ML.Transforms.LpNormNormalizerTransformNormalizerKind.L2Norm; + public LpNormNormalizerTransformNormalizerKind NormKind { get; set; } = LpNormNormalizerTransformNormalizerKind.L2Norm; /// /// Subtract mean from each value before normalizing @@ -10877,7 +10877,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -10992,7 +10992,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformAffineColumn[] Column { get; set; } + public NormalizeTransformAffineColumn[] Column { get; set; } /// /// Whether to map zero to zero, preserving sparsity @@ -11074,7 +11074,7 @@ public sealed partial class NAHandleTransformColumn : OneToOneColumn /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAHandleTransformReplacementKind? Kind { get; set; } + public NAHandleTransformReplacementKind? Kind { get; set; } /// /// Whether to impute values by slot @@ -11148,12 +11148,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:rep:src) /// - public Microsoft.ML.Transforms.NAHandleTransformColumn[] Column { get; set; } + public NAHandleTransformColumn[] Column { get; set; } /// /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAHandleTransformReplacementKind ReplaceWith { get; set; } = Microsoft.ML.Transforms.NAHandleTransformReplacementKind.Def; + public NAHandleTransformReplacementKind ReplaceWith { get; set; } = NAHandleTransformReplacementKind.Def; /// /// Whether to impute values by slot @@ -11282,7 +11282,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NAIndicatorTransformColumn[] Column { get; set; } + public NAIndicatorTransformColumn[] Column { get; set; } /// /// Input dataset @@ -11401,7 +11401,7 @@ public void AddColumn(string name, string source) /// /// Columns to drop the NAs for /// - public Microsoft.ML.Transforms.NADropTransformColumn[] Column { get; set; } + public NADropTransformColumn[] Column { get; set; } /// /// Input dataset @@ -11551,7 +11551,7 @@ public sealed partial class NAReplaceTransformColumn : OneToOneColumn /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAReplaceTransformReplacementKind? Kind { get; set; } + public NAReplaceTransformReplacementKind? 
Kind { get; set; } /// /// Whether to impute values by slot @@ -11620,12 +11620,12 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:rep:src) /// - public Microsoft.ML.Transforms.NAReplaceTransformColumn[] Column { get; set; } + public NAReplaceTransformColumn[] Column { get; set; } /// /// The replacement method to utilize /// - public Microsoft.ML.Transforms.NAReplaceTransformReplacementKind ReplacementKind { get; set; } = Microsoft.ML.Transforms.NAReplaceTransformReplacementKind.Def; + public NAReplaceTransformReplacementKind ReplacementKind { get; set; } = NAReplaceTransformReplacementKind.Def; /// /// Whether to impute values by slot @@ -11744,7 +11744,7 @@ public sealed partial class NgramTransformColumn : OneToOneColumn /// Statistical measure used to evaluate how important a word is to a document in a corpus /// - public Microsoft.ML.Transforms.NgramTransformWeightingCriteria? Weighting { get; set; } + public NgramTransformWeightingCriteria? Weighting { get; set; } /// /// Name of the new column @@ -11808,7 +11808,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NgramTransformColumn[] Column { get; set; } + public NgramTransformColumn[] Column { get; set; } /// /// Maximum ngram length @@ -11833,7 +11833,7 @@ public void AddColumn(string name, string source) /// /// The weighting criteria /// - public Microsoft.ML.Transforms.NgramTransformWeightingCriteria Weighting { get; set; } = Microsoft.ML.Transforms.NgramTransformWeightingCriteria.Tf; + public NgramTransformWeightingCriteria Weighting { get; set; } = NgramTransformWeightingCriteria.Tf; /// /// Input dataset @@ -12102,7 +12102,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.PcaTransformColumn[] Column { get; set; } + public PcaTransformColumn[] Column { get; set; } /// /// The name of the weight column @@ -12276,7 +12276,7 @@ public sealed partial class RandomNumberGenerator : Microsoft.ML.Runtime.EntryPo /// /// New column definition(s) (optional form: name:seed) /// - public Microsoft.ML.Transforms.GenerateNumberTransformColumn[] Column { get; set; } + public GenerateNumberTransformColumn[] Column { get; set; } /// /// Use an auto-incremented integer starting at zero instead of a random number @@ -12750,7 +12750,7 @@ public sealed partial class Segregator : Microsoft.ML.Runtime.EntryPoints.Common /// /// Specifies how to unroll multiple pivot columns of different size. /// - public Microsoft.ML.Transforms.UngroupTransformUngroupMode Mode { get; set; } = Microsoft.ML.Transforms.UngroupTransformUngroupMode.Inner; + public UngroupTransformUngroupMode Mode { get; set; } = UngroupTransformUngroupMode.Inner; /// /// Input dataset @@ -12935,7 +12935,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.NormalizeTransformBinColumn[] Column { get; set; } + public NormalizeTransformBinColumn[] Column { get; set; } /// /// Max number of bins, power of 2 recommended @@ -13055,7 +13055,7 @@ public sealed partial class TermLoaderArguments /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). 
/// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Drop unknown terms instead of mapping them to NA term. @@ -13088,12 +13088,12 @@ public void AddColumn(string name, params string[] source) /// /// New column definition (optional form: name:srcs). /// - public Microsoft.ML.Transforms.TextTransformColumn Column { get; set; } + public TextTransformColumn Column { get; set; } /// /// Dataset language or 'AutoDetect' to detect language per row. /// - public Microsoft.ML.Transforms.TextTransformLanguage Language { get; set; } = Microsoft.ML.Transforms.TextTransformLanguage.English; + public TextTransformLanguage Language { get; set; } = TextTransformLanguage.English; /// /// Stopwords remover. @@ -13104,7 +13104,7 @@ public void AddColumn(string name, params string[] source) /// /// Casing text using the rules of the invariant culture. /// - public Microsoft.ML.Transforms.TextNormalizerTransformCaseNormalizationMode TextCase { get; set; } = Microsoft.ML.Transforms.TextNormalizerTransformCaseNormalizationMode.Lower; + public TextNormalizerTransformCaseNormalizationMode TextCase { get; set; } = TextNormalizerTransformCaseNormalizationMode.Lower; /// /// Whether to keep diacritical marks or remove them. @@ -13129,7 +13129,7 @@ public void AddColumn(string name, params string[] source) /// /// A dictionary of whitelisted terms. /// - public Microsoft.ML.Transforms.TermLoaderArguments Dictionary { get; set; } + public TermLoaderArguments Dictionary { get; set; } /// /// Ngram feature extractor to use for words (WordBag/WordHashBag). @@ -13146,7 +13146,7 @@ public void AddColumn(string name, params string[] source) /// /// Normalize vectors (rows) individually by rescaling them to unit norm. /// - public Microsoft.ML.Transforms.TextTransformTextNormKind VectorNormalizer { get; set; } = Microsoft.ML.Transforms.TextTransformTextNormKind.L2; + public TextTransformTextNormKind VectorNormalizer { get; set; } = TextTransformTextNormKind.L2; /// /// Input dataset @@ -13251,7 +13251,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) (optional form: name:src) /// - public Microsoft.ML.Transforms.TermTransformColumn[] Column { get; set; } + public TermTransformColumn[] Column { get; set; } /// /// Maximum number of terms to keep per column when auto-training @@ -13266,7 +13266,7 @@ public void AddColumn(string name, string source) /// /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, e.g., text sorting will be case sensitive (e.g., 'A' then 'Z' then 'a'). /// - public Microsoft.ML.Transforms.TermTransformSortOrder Sort { get; set; } = Microsoft.ML.Transforms.TermTransformSortOrder.Occurrence; + public TermTransformSortOrder Sort { get; set; } = TermTransformSortOrder.Occurrence; /// /// Whether key value metadata should be text, regardless of the actual input type @@ -13544,7 +13544,7 @@ public void AddColumn(string name, string source) /// /// New column definition(s) /// - public Microsoft.ML.Transforms.DelimitedTokenizeTransformColumn[] Column { get; set; } + public DelimitedTokenizeTransformColumn[] Column { get; set; } /// /// Comma separated set of term separator(s). Commonly: 'space', 'comma', 'semicolon' or other single character. 
@@ -13699,7 +13699,7 @@ public sealed class AutoMlStateAutoMlStateBase : AutoMlStateBase /// /// Supported metric for evaluator. /// - public Microsoft.ML.Runtime.AutoInferenceAutoMlMlStateArgumentsMetrics Metric { get; set; } = Microsoft.ML.Runtime.AutoInferenceAutoMlMlStateArgumentsMetrics.Auc; + public AutoInferenceAutoMlMlStateArgumentsMetrics Metric { get; set; } = AutoInferenceAutoMlMlStateArgumentsMetrics.Auc; /// /// AutoML engine (pipeline optimizer) that generates next candidates. @@ -13710,7 +13710,7 @@ public sealed class AutoMlStateAutoMlStateBase : AutoMlStateBase /// /// Kind of trainer for task, such as binary classification trainer, multiclass trainer, etc. /// - public Microsoft.ML.Models.MacroUtilsTrainerKinds TrainerKind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; + public Microsoft.ML.Models.MacroUtilsTrainerKinds TrainerKind { get; set; } = Microsoft.ML.Models.MacroUtilsTrainerKinds.SignatureBinaryClassifierTrainer; /// /// Arguments for creating terminator, which determines when to stop search. @@ -13730,9 +13730,6 @@ public abstract class CalibratorTrainer : ComponentKind {} - /// - /// - /// public sealed class FixedPlattCalibratorCalibratorTrainer : CalibratorTrainer { /// @@ -13750,9 +13747,6 @@ public sealed class FixedPlattCalibratorCalibratorTrainer : CalibratorTrainer - /// - /// - /// public sealed class NaiveCalibratorCalibratorTrainer : CalibratorTrainer { internal override string ComponentName => "NaiveCalibrator"; @@ -13760,9 +13754,6 @@ public sealed class NaiveCalibratorCalibratorTrainer : CalibratorTrainer - /// - /// - /// public sealed class PavCalibratorCalibratorTrainer : CalibratorTrainer { internal override string ComponentName => "PavCalibrator"; @@ -13966,7 +13957,7 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -14131,7 +14122,7 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -14274,12 +14265,12 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; internal override string ComponentName => "FastTreeBinaryClassification"; } @@ -14354,7 +14345,7 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -14519,7 +14510,7 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -14662,12 +14653,12 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; internal override string ComponentName => "FastTreeRanking"; } @@ -14702,7 +14693,7 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) 
@@ -14867,7 +14858,7 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. /// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -15010,12 +15001,12 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; internal override string ComponentName => "FastTreeRegression"; } @@ -15055,7 +15046,7 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent) /// - public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; + public Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType OptimizationAlgorithm { get; set; } = Microsoft.ML.Trainers.BoostedTreeArgsOptimizationAlgorithmType.GradientDescent; /// /// Early stopping rule. (Validation set (/valid) is required.) @@ -15220,7 +15211,7 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle. 
/// - public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; + public Microsoft.ML.Trainers.Bundle Bundling { get; set; } = Microsoft.ML.Trainers.Bundle.None; /// /// Maximum number of distinct values (bins) per feature @@ -15363,12 +15354,12 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// Normalize option for the feature column /// - public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; + public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto; /// /// Whether learner should cache input training data /// - public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; + public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto; internal override string ComponentName => "FastTreeTweedieRegression"; } @@ -15405,7 +15396,7 @@ public sealed class NGramNgramExtractor : NgramExtractor /// /// The weighting criteria /// - public Microsoft.ML.Transforms.NgramTransformWeightingCriteria Weighting { get; set; } = Microsoft.ML.Transforms.NgramTransformWeightingCriteria.Tf; + public Microsoft.ML.Transforms.NgramTransformWeightingCriteria Weighting { get; set; } = Microsoft.ML.Transforms.NgramTransformWeightingCriteria.Tf; internal override string ComponentName => "NGram"; } @@ -15490,7 +15481,7 @@ public sealed partial class PartitionedFileLoaderColumn /// /// Data type of the column. /// - public Microsoft.ML.Transforms.DataKind? Type { get; set; } + public Microsoft.ML.Data.DataKind? Type { get; set; } /// /// Index of the directory representing this column. @@ -15508,12 +15499,12 @@ public sealed class SimplePathParserPartitionedPathParser : PartitionedPathParse /// /// Column definitions used to override the Partitioned Path Parser. Expected with the format name:type:numeric-source, e.g. col=MyFeature:R4:1 /// - public Microsoft.ML.Runtime.PartitionedFileLoaderColumn[] Columns { get; set; } + public PartitionedFileLoaderColumn[] Columns { get; set; } /// /// Data type of each column. 
/// - public Microsoft.ML.Transforms.DataKind Type { get; set; } = Microsoft.ML.Transforms.DataKind.TX; + public Microsoft.ML.Data.DataKind Type { get; set; } = Microsoft.ML.Data.DataKind.TX; internal override string ComponentName => "SimplePathParser"; } From 0b264087995f83737866cf79549c447a453a4427 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 15:35:03 -0700 Subject: [PATCH 08/11] address #305 add test to regenerate CsharpApi.cs --- src/Microsoft.ML/CSharpApi.cs | 72 +++++++++---------- .../Internal/Tools/CSharpGeneratorUtils.cs | 36 ++++++---- test/Microsoft.ML.Tests/CSharpCodeGen.cs | 10 ++- 3 files changed, 66 insertions(+), 52 deletions(-) diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs index 6009c5a1ae..42b6f47e98 100644 --- a/src/Microsoft.ML/CSharpApi.cs +++ b/src/Microsoft.ML/CSharpApi.cs @@ -2196,12 +2196,12 @@ public sealed partial class CrossValidationResultsCombiner /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; /// /// Specifies the trainer kind, which determines the evaluator to be used. @@ -2316,12 +2316,12 @@ public sealed partial class CrossValidator /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; public sealed class Output @@ -2725,7 +2725,7 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -2870,7 +2870,7 @@ public sealed partial class OvaModelCombiner : Microsoft.ML.Runtime.EntryPoints. 
/// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -3574,12 +3574,12 @@ public sealed partial class TrainTestEvaluator /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; public sealed class Output @@ -4034,12 +4034,12 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -4316,12 +4316,12 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -4714,12 +4714,12 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -5140,12 +5140,12 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -5526,12 +5526,12 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; 
set; } = "Weight"; /// /// Column to use for labels @@ -5917,12 +5917,12 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -6079,7 +6079,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -6220,7 +6220,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -6336,7 +6336,7 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// The data to be used for training @@ -6616,7 +6616,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -6766,7 +6766,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -7095,7 +7095,7 @@ public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoint /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// The data to be used for training @@ -7235,7 +7235,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. 
/// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -7765,7 +7765,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -14240,12 +14240,12 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -14628,12 +14628,12 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -14976,12 +14976,12 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels @@ -15329,12 +15329,12 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; /// /// Column to use for labels diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs index 8205963e28..5ec5a36025 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs @@ -42,6 +42,23 @@ public static Type ExtractOptionalOrNullableType(Type type) return type; } + public static Type ExtractOptionalOrNullableType(Type type, out bool isNullable, out bool isOptional) + { + isNullable = false; + isOptional = false; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + type = type.GetGenericArguments()[0]; + 
            isNullable = true;
+            }
+            else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>))
+            {
+                type = type.GetGenericArguments()[0];
+                isOptional = true;
+            }
+            return type;
+        }
+
         public static string GetCSharpTypeName(Type type)
         {
             if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>))
@@ -83,20 +100,7 @@ public static string GetInputType(ModuleCatalog catalog, Type inputType, Generat
             if (Var.CheckType(inputType))
                 return $"Var<{GetCSharpTypeName(inputType)}>";

-            bool isNullable = false;
-            bool isOptional = false;
-            var type = inputType;
-            if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>))
-            {
-                type = type.GetGenericArguments()[0];
-                isNullable = true;
-            }
-            else if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Optional<>))
-            {
-                type = type.GetGenericArguments()[0];
-                isOptional = true;
-            }
-
+            var type = ExtractOptionalOrNullableType(inputType, out bool isNullable, out bool isOptional);
             var typeEnum = TlcModule.GetDataType(type);
             switch (typeEnum)
             {
@@ -227,7 +231,9 @@ public static string GetValue(ModuleCatalog catalog, Type fieldType, object fiel
             }

             var typeEnum = TlcModule.GetDataType(fieldType);
-            fieldType = ExtractOptionalOrNullableType(fieldType);
+            fieldType = ExtractOptionalOrNullableType(fieldType, out bool isNullable, out bool isOptional);
+            if (isOptional)
+                fieldValue = (fieldValue as Optional).GetValue();
             switch (typeEnum)
             {
                 case TlcModule.DataKind.Array:
diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
index a379f42708..102efd1c61 100644
--- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs
+++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
@@ -16,12 +16,20 @@ public CSharpCodeGen(ITestOutputHelper output) : base(output)
         {
         }
+
+        [Fact(Skip = "Execute this test if you want to regenerate CSharpApi file")]
+        public void RegenerateCSharpApi()
+        {
+            var basePath = GetDataPath("../../src/Microsoft.ML/CSharpApi.cs");
+            Runtime.Tools.Maml.Main(new[] { $"?
generator=cs{{csFilename={dataPath}}}" }); - + var basePath = GetDataPath("../../src/Microsoft.ML/CSharpApi.cs"); using (StreamReader baseline = OpenReader(basePath)) using (StreamReader result = OpenReader(dataPath)) From 1334da69def423e47ce0448056ef2bcca07a4f2e Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 11 Jun 2018 15:47:55 -0700 Subject: [PATCH 09/11] revert optional values change --- src/Microsoft.ML/CSharpApi.cs | 72 +++++++++---------- .../Internal/Tools/CSharpGeneratorUtils.cs | 2 - test/Microsoft.ML.Tests/CSharpCodeGen.cs | 1 - 3 files changed, 36 insertions(+), 39 deletions(-) diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs index 42b6f47e98..6009c5a1ae 100644 --- a/src/Microsoft.ML/CSharpApi.cs +++ b/src/Microsoft.ML/CSharpApi.cs @@ -2196,12 +2196,12 @@ public sealed partial class CrossValidationResultsCombiner /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } /// /// Specifies the trainer kind, which determines the evaluator to be used. @@ -2316,12 +2316,12 @@ public sealed partial class CrossValidator /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } public sealed class Output @@ -2725,7 +2725,7 @@ public sealed partial class OneVersusAll : Microsoft.ML.Runtime.EntryPoints.Comm /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -2870,7 +2870,7 @@ public sealed partial class OvaModelCombiner : Microsoft.ML.Runtime.EntryPoints. 
/// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -3574,12 +3574,12 @@ public sealed partial class TrainTestEvaluator /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for grouping /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupColumn { get; set; } public sealed class Output @@ -4034,12 +4034,12 @@ public sealed partial class FastForestBinaryClassifier : Microsoft.ML.Runtime.En /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -4316,12 +4316,12 @@ public sealed partial class FastForestRegressor : Microsoft.ML.Runtime.EntryPoin /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -4714,12 +4714,12 @@ public sealed partial class FastTreeBinaryClassifier : Microsoft.ML.Runtime.Entr /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -5140,12 +5140,12 @@ public sealed partial class FastTreeRanker : Microsoft.ML.Runtime.EntryPoints.Co /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -5526,12 +5526,12 @@ public sealed partial class FastTreeRegressor : Microsoft.ML.Runtime.EntryPoints /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional 
WeightColumn { get; set; } /// /// Column to use for labels @@ -5917,12 +5917,12 @@ public sealed partial class FastTreeTweedieRegressor : Microsoft.ML.Runtime.Entr /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -6079,7 +6079,7 @@ public sealed partial class GeneralizedAdditiveModelBinaryClassifier : Microsoft /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -6220,7 +6220,7 @@ public sealed partial class GeneralizedAdditiveModelRegressor : Microsoft.ML.Run /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -6336,7 +6336,7 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// The data to be used for training @@ -6616,7 +6616,7 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -6766,7 +6766,7 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -7095,7 +7095,7 @@ public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoint /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// The data to be used for training @@ -7235,7 +7235,7 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. 
/// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -7765,7 +7765,7 @@ public sealed partial class StochasticGradientDescentBinaryClassifier : Microsof /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -14240,12 +14240,12 @@ public sealed class FastTreeBinaryClassificationFastTreeTrainer : FastTreeTraine /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -14628,12 +14628,12 @@ public sealed class FastTreeRankingFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -14976,12 +14976,12 @@ public sealed class FastTreeRegressionFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels @@ -15329,12 +15329,12 @@ public sealed class FastTreeTweedieRegressionFastTreeTrainer : FastTreeTrainer /// /// Column to use for example groupId /// - public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } = "GroupId"; + public Microsoft.ML.Runtime.EntryPoints.Optional GroupIdColumn { get; set; } /// /// Column to use for example weight /// - public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } = "Weight"; + public Microsoft.ML.Runtime.EntryPoints.Optional WeightColumn { get; set; } /// /// Column to use for labels diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs index 5ec5a36025..22de030b7c 100644 --- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs +++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs @@ -232,8 +232,6 @@ public static string GetValue(ModuleCatalog catalog, Type fieldType, object fiel var typeEnum = TlcModule.GetDataType(fieldType); fieldType = ExtractOptionalOrNullableType(fieldType, out bool isNullable, out bool isOptional); - if (isOptional) - fieldValue = (fieldValue as Optional).GetValue(); switch (typeEnum) { case TlcModule.DataKind.Array: diff --git 
a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
index 102efd1c61..08a17b6219 100644
--- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs
+++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
@@ -16,7 +16,6 @@ public CSharpCodeGen(ITestOutputHelper output) : base(output)
         {
         }
-
         [Fact(Skip = "Execute this test if you want to regenerate CSharpApi file")]
         public void RegenerateCSharpApi()
         {
             var basePath = GetDataPath("../../src/Microsoft.ML/CSharpApi.cs");
             Runtime.Tools.Maml.Main(new[] { $"? generator=cs{{csFilename={basePath}}}" });

From f8bfc326bd399e0b9de68a5872d95c32ede74651 Mon Sep 17 00:00:00 2001
From: Ivan Matantsev
Date: Mon, 11 Jun 2018 15:54:39 -0700
Subject: [PATCH 10/11] remove unnecessary line

---
 test/Microsoft.ML.Tests/CSharpCodeGen.cs | 2 --
 1 file changed, 2 deletions(-)

diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
index 08a17b6219..678edac461 100644
--- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs
+++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs
@@ -40,8 +40,6 @@ public void TestGeneratedCSharpAPI()
                     if (line1 == null && line2 == null)
                         break;

-                    if (line2 != null && line2.Contains(dataPath))
-                        continue;
                     Assert.Equal(line1, line2);
                 }
             }

From 21ed75efcc893877c2eba0e61d57e5c17fac0ceb Mon Sep 17 00:00:00 2001
From: Ivan Matantsev
Date: Tue, 12 Jun 2018 09:57:35 -0700
Subject: [PATCH 11/11] asserts to checks

---
 src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs
index 22de030b7c..11e2116b18 100644
--- a/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs
+++ b/src/Microsoft.ML/Runtime/Internal/Tools/CSharpGeneratorUtils.cs
@@ -30,7 +30,7 @@ public EntryPointGenerationMetadata(string classNamespace, string className)

         public static EntryPointGenerationMetadata GetEntryPointMetadata(ModuleCatalog.EntryPointInfo entryPointInfo)
         {
             var split = entryPointInfo.Name.Split('.');
-            Contracts.Assert(split.Length == 2);
+            Contracts.Check(split.Length == 2);
             return new EntryPointGenerationMetadata(split[0], split[1]);
         }
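
Note on patch 11 ("asserts to checks"): replacing Contracts.Assert with Contracts.Check in GetEntryPointMetadata turns a debug-only sanity check into validation that also runs in release builds. The snippet below is a minimal, self-contained sketch of that convention, assuming the usual split where Assert-style calls are compiled away outside DEBUG builds while Check-style calls always throw; MiniContracts and NameSplitDemo are illustrative names, not the Microsoft.ML.Runtime Contracts implementation.

using System;
using System.Diagnostics;

// Illustrative stand-in for the Assert-vs-Check convention; not the real Contracts class.
internal static class MiniContracts
{
    // Debug-only: calls are stripped from builds that do not define DEBUG.
    [Conditional("DEBUG")]
    public static void Assert(bool condition)
    {
        if (!condition)
            throw new InvalidOperationException("Assertion failed");
    }

    // Always enforced, so malformed input fails loudly in release builds as well.
    public static void Check(bool condition, string message = "Check failed")
    {
        if (!condition)
            throw new InvalidOperationException(message);
    }
}

internal static class NameSplitDemo
{
    public static void Main()
    {
        var split = "Trainers.FastTreeRegressor".Split('.');
        MiniContracts.Check(split.Length == 2, "Entry point name must look like Namespace.Name");
        Console.WriteLine($"{split[0]} / {split[1]}");
    }
}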
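
Note on the CSharpGeneratorUtils refactor in patch 08: the inline Nullable/Optional unwrapping inside GetInputType moves into an ExtractOptionalOrNullableType overload that reports what it stripped through out parameters, so callers no longer repeat the type checks. The stand-alone sketch below shows the same pattern for Nullable only; ExtractUnderlyingType and UnwrapSketch are hypothetical names, and the real helper additionally unwraps the entry-point Optional wrapper.

using System;

// Hypothetical sketch of the consolidated unwrapping pattern.
internal static class UnwrapSketch
{
    public static Type ExtractUnderlyingType(Type type, out bool isNullable)
    {
        isNullable = false;
        if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>))
        {
            // Strip the Nullable wrapper and remember that it was there.
            type = type.GetGenericArguments()[0];
            isNullable = true;
        }
        return type;
    }

    public static void Main()
    {
        var underlying = ExtractUnderlyingType(typeof(int?), out bool isNullable);
        Console.WriteLine($"{underlying.Name}, nullable: {isNullable}"); // Int32, nullable: True
    }
}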
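
Note on the Optional default-value change that patch 08 introduces and patch 09 reverts: the regenerated API briefly initialized properties such as WeightColumn with a literal default like "Weight". The sketch below is a simplified, hypothetical model of an Optional-style wrapper (OptionalColumn is an invented type, not Microsoft.ML.Runtime.EntryPoints.Optional) illustrating why such an initializer is not the same as leaving the property unset: the implicit string conversion marks the value as explicitly supplied, so downstream code can no longer distinguish a user-chosen "Weight" from no preference at all, which is the plausible reason the generated initializers were reverted.

using System;

// Hypothetical sketch only; models the behavior being discussed, not the ML.NET type.
public sealed class OptionalColumn
{
    public string Value { get; }
    public bool IsExplicit { get; }   // true when a caller actually assigned a value

    private OptionalColumn(string value, bool isExplicit)
    {
        Value = value;
        IsExplicit = isExplicit;
    }

    // Mirrors an implicit string conversion like the one the generated API relies on.
    public static implicit operator OptionalColumn(string value)
        => new OptionalColumn(value, isExplicit: true);

    // Framework-supplied default: carries a value without claiming the user set it.
    public static OptionalColumn Implicit(string value)
        => new OptionalColumn(value, isExplicit: false);
}

public static class OptionalColumnDemo
{
    public static void Main()
    {
        OptionalColumn unset = OptionalColumn.Implicit("Weight"); // default, user said nothing
        OptionalColumn assigned = "Weight";                       // what an initializer produces
        Console.WriteLine($"{unset.IsExplicit} {assigned.IsExplicit}"); // False True
    }
}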