
C# IMLDataSet Class Code Examples


This article collects typical usage examples of the IMLDataSet class in C#. If you are wondering what IMLDataSet is, how it is used, or what real code that uses it looks like, the curated examples below should help.



The IMLDataSet class belongs to the Encog.ML.Data namespace. The 20 code examples below demonstrate typical uses of the class, ordered roughly by popularity.
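
Before looking at the examples, here is a minimal sketch of how an IMLDataSet is commonly constructed and inspected, using the in-memory BasicMLDataSet implementation with the classic XOR data. The using directives assume the usual Encog 3.x namespaces and are not taken from the examples below.

    using System;
    using Encog.ML.Data;
    using Encog.ML.Data.Basic;

    public static class XorDataSetSketch
    {
        public static void Main()
        {
            // XOR truth table: four input rows and their ideal (expected) outputs.
            double[][] input =
            {
                new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 },
                new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 }
            };
            double[][] ideal =
            {
                new[] { 0.0 }, new[] { 1.0 },
                new[] { 1.0 }, new[] { 0.0 }
            };

            // BasicMLDataSet is the standard in-memory implementation of IMLDataSet.
            IMLDataSet trainingSet = new BasicMLDataSet(input, ideal);

            // IMLDataSet exposes Count, InputSize, IdealSize and iteration over IMLDataPair.
            Console.WriteLine("Pairs: {0}, InputSize: {1}, IdealSize: {2}",
                              trainingSet.Count, trainingSet.InputSize, trainingSet.IdealSize);
            foreach (IMLDataPair pair in trainingSet)
            {
                Console.WriteLine("{0} -> {1}", pair.Input, pair.Ideal);
            }
        }
    }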

Example 1: Create

        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork) method,
                                                       new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                       mate);

            return train;
        }
Developer: neismit | Project: emds | Source: GeneticFactory.cs
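
The factory above wires a NeuralGeneticAlgorithm to a TrainingSetScore built from the IMLDataSet. The sketch below shows the same trainer constructed and run by hand, using the factory's default parameters. The EncogUtility.SimpleFeedForward helper and the namespaces are assumptions based on Encog 3.x, not part of the example; the NeuralGeneticAlgorithm constructor arguments come from the example itself.

    using System;
    using Encog.MathUtil.Randomize;
    using Encog.ML.Data;
    using Encog.Neural.Networks;
    using Encog.Neural.Networks.Training;
    using Encog.Neural.Networks.Training.Genetic;
    using Encog.Util.Simple;

    public static class GeneticTrainingSketch
    {
        // 'trainingSet' could be the XOR IMLDataSet built in the first sketch.
        public static void Train(IMLDataSet trainingSet)
        {
            // 2 inputs, one hidden layer of 4 neurons, 1 output (sigmoid activation).
            BasicNetwork network = EncogUtility.SimpleFeedForward(2, 4, 0, 1, false);

            // Score candidate networks by their error on the training set.
            var score = new TrainingSetScore(trainingSet);

            // Same defaults as the factory: population 5000, mutation 0.1, mate 0.25.
            var train = new NeuralGeneticAlgorithm(
                network, new RangeRandomizer(-1, 1), score, 5000, 0.1, 0.25);

            for (int i = 0; i < 50; i++)
            {
                train.Iteration();
            }

            Console.WriteLine("Best score after training: {0}", train.Error);
        }
    }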


Example 2: JacobianChainRule

 public JacobianChainRule(BasicNetwork network, IMLDataSet indexableTraining)
 {
     // Decompiled control flow straightened into a linear constructor;
     // the obfuscated field names are left as emitted by the decompiler.
     _xb12276308f0fa6d9 = indexableTraining;
     _x87a7fc6a72741c2e = network;
     _xabb126b401219ba2 = network.Structure.CalculateSize();
     _x530ae94d583e0ea1 = (int) _xb12276308f0fa6d9.Count;
     _xbdeab667c25bbc32 = EngineArray.AllocateDouble2D(_x530ae94d583e0ea1, _xabb126b401219ba2);
     _xc8a462f994253347 = new double[_x530ae94d583e0ea1];
     var data = new BasicMLData(_xb12276308f0fa6d9.InputSize);
     var data2 = new BasicMLData(_xb12276308f0fa6d9.IdealSize);
     _x61830ac74d65acc3 = new BasicMLDataPair(data, data2);
 }
Developer: neismit | Project: emds | Source: JacobianChainRule.cs


Example 3: QuickPropagation

 public QuickPropagation(IContainsFlat network, IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkQPROP kqprop = new TrainFlatNetworkQPROP(network.Flat, this.Training, learnRate);
     base.FlatTraining = kqprop;
 }
Developer: neismit | Project: emds | Source: QuickPropagation.cs


Example 4: TrainAdaline

 public TrainAdaline(BasicNetwork network, IMLDataSet training, double learningRate)
     : base(TrainingImplementationType.Iterative)
 {
     // Decompiled control flow straightened out; the obfuscated field names
     // are left as emitted by the decompiler.
     if (network.LayerCount > 2)
     {
         throw new NeuralNetworkError("An ADALINE network only has two layers.");
     }

     _x87a7fc6a72741c2e = network;
     _x823a2b9c8bf459c5 = training;
     _x9b481c22b6706459 = learningRate;
 }
Developer: neismit | Project: emds | Source: TrainAdaline.cs


Example 5: SVMSearchTrain

 public SVMSearchTrain(SupportVectorMachine method, IMLDataSet training)
     : base(TrainingImplementationType.Iterative)
 {
     // Decompiled control flow straightened into simple field initialization;
     // the obfuscated field names are left as emitted by the decompiler.
     _x9425fdc2df7bcafc = 0;
     _x2350dfd8c7639ed6 = -5.0;
     _x38c942a9bdfcbac4 = 2.0;
     _xdee5cbd981b6d49e = 15.0;
     _xec9380575da42aee = -10.0;
     _xd522fee165affb59 = 10.0;
     _x441f2c3a7d69c688 = 1.0;
     _x87a7fc6a72741c2e = method;
     Training = training;
     _x9eeb587621db687c = false;
     _xab248fa87e95a7df = false;
     _x1e074b5762f8595b = new SVMTrain(_x87a7fc6a72741c2e, training);
 }
Developer: neismit | Project: emds | Source: SVMSearchTrain.cs


Example 6: QuickPropagation

 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
 ///            a learning rate to start with.  If it fails to converge, 
 ///            then drop it.  Just like backprop, except QPROP can 
 ///            take higher learning rates.</param>
 public QuickPropagation(BasicNetwork network,
                         IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate = learnRate;
 }
Developer: neismit | Project: emds | Source: QuickPropagation.cs
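
A typical training loop for the QPROP trainer shown above might look like the following sketch. The EncogUtility.SimpleFeedForward helper and the namespaces are assumptions based on Encog 3.x; the QuickPropagation constructor and the suggested learning rate of 2 come from the example itself.

    using System;
    using Encog.ML.Data;
    using Encog.Neural.Networks;
    using Encog.Neural.Networks.Training.Propagation.Quick;
    using Encog.Util.Simple;

    public static class QuickPropagationSketch
    {
        // 'trainingSet' could be the XOR IMLDataSet built in the first sketch.
        public static void Train(IMLDataSet trainingSet)
        {
            // Size the network from the data set: InputSize inputs, 4 hidden, IdealSize outputs.
            BasicNetwork network = EncogUtility.SimpleFeedForward(
                trainingSet.InputSize, 4, 0, trainingSet.IdealSize, false);

            // Learning rate of 2 is the starting point suggested in the doc comment above.
            var train = new QuickPropagation(network, trainingSet, 2.0);

            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine("Epoch {0}, error {1}", epoch++, train.Error);
            } while (train.Error > 0.01 && epoch <= 5000);

            train.FinishTraining();
        }
    }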


Example 7: PruneIncremental

 public PruneIncremental(IMLDataSet training, INeuralNetworkPattern pattern, int iterations, int weightTries, int numTopResults, IStatusReportable report)
     : base(report)
 {
     // Decompiled control flow straightened into simple field initialization;
     // the obfuscated field names are left as emitted by the decompiler.
     _x0b03741e8f17a9f7 = false;
     _xab3ddaff42dd298a = new List<HiddenLayerParams>();
     _x823a2b9c8bf459c5 = training;
     _x49d5b7c4ad0e0bdd = pattern;
     _xdbf51c857aeb8093 = iterations;
     _x64343a0786fb9a3f = report;
     _xe009ad1bd0a8245a = weightTries;
     _xc5f756e0b4a83af0 = new BasicNetwork[numTopResults];
     _x7890c8b3a33b26e2 = new double[numTopResults];
 }
Developer: neismit | Project: emds | Source: PruneIncremental.cs


Example 8: TrainBayesian

 /// <summary>
 /// Construct a Bayesian trainer. Use K2 to search, and the SimpleEstimator
 /// to estimate probability.  Init as Naive Bayes
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theData">The data to train.</param>
 /// <param name="theMaximumParents">The max number of parents.</param>
 public TrainBayesian(BayesianNetwork theNetwork, IMLDataSet theData,
                      int theMaximumParents)
     : this(theNetwork, theData, theMaximumParents,
            BayesianInit.InitNaiveBayes, new SearchK2(),
            new SimpleEstimator())
 {
 }
Developer: johannsutherland | Project: encog-dotnet-core | Source: TrainBayesian.cs


Example 9: TrainFlatNetworkBackPropagation

 public TrainFlatNetworkBackPropagation(FlatNetwork network, IMLDataSet training, double theLearningRate, double theMomentum)
     : base(network, training)
 {
     this._xef52c16be8e501c9 = theMomentum;
     this._x9b481c22b6706459 = theLearningRate;
     this._xe4def4d471bbc130 = new double[network.Weights.Length];
 }
Developer: neismit | Project: emds | Source: TrainFlatNetworkBackPropagation.cs


Example 10: ScaledConjugateGradient

 /// <summary>
 /// Construct a training class.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public ScaledConjugateGradient(IContainsFlat network,
                                IMLDataSet training) : base(network, training)
 {
     var rpropFlat = new TrainFlatNetworkSCG(
         network.Flat, Training);
     FlatTraining = rpropFlat;
 }
Developer: OperatorOverload | Project: encog-cs | Source: ScaledConjugateGradient.cs


Example 11: Init

        /// <inheritdoc />
        public override void Init(BasicNetwork theNetwork, IMLDataSet theTraining)
        {
            base.Init(theNetwork, theTraining);
            int weightCount = theNetwork.Structure.Flat.Weights.Length;

            _training = theTraining;
            _network = theNetwork;

            _hessianMatrix = new Matrix(weightCount, weightCount);
            _hessian = _hessianMatrix.Data;

            // create worker(s)
            var determine = new DetermineWorkload(
                ThreadCount, _training.Count);

            _workers = new ChainRuleWorker[determine.ThreadCount];

            int index = 0;

            // handle CPU
            foreach (IntRange r in determine.CalculateWorkers())
            {
                _workers[index++] = new ChainRuleWorker((FlatNetwork) _flat.Clone(),
                    _training.OpenAdditional(), r.Low,
                    r.High);
            }
        }
Developer: johannsutherland | Project: encog-dotnet-core | Source: HessianCR.cs


Example 12: evaluateNetwork

        public static void evaluateNetwork(BasicNetwork network, IMLDataSet training)
        {
            double total = 0;
            int seed = 0;
            int completed = 0;

            Stopwatch sw = new Stopwatch();

            sw.Start();
            while (completed < SAMPLE_SIZE)
            {
                new ConsistentRandomizer(-1, 1, seed).Randomize(network);
                int iter = Evaluate(network, training);
                if (iter == -1)
                {
                    seed++;
                }
                else
                {
                    total += iter;
                    seed++;
                    completed++;
                }
            }

            sw.Stop();


            Console.WriteLine(network.GetActivation(1).GetType().Name + ": time="
                    + Format.FormatInteger((int)sw.ElapsedMilliseconds)
                    + "ms, Avg Iterations: "
                    + Format.FormatInteger((int)(total / SAMPLE_SIZE)));

        }
Developer: johannsutherland | Project: encog-dotnet-core | Source: ElliottBenchmark.cs


Example 13: Create

 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // Decompiled control flow straightened out; locals given descriptive names.
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     double defaultGamma = 1.0 / ((SupportVectorMachine) method).InputCount;
     double defaultC = 1.0;

     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);
     double gamma = holder.GetDouble("GAMMA", false, defaultGamma);
     double c = holder.GetDouble("C", false, defaultC);

     SVMTrain train = new SVMTrain((SupportVectorMachine) method, training) {
         Gamma = gamma
     };
     train.C = c;
     return train;
 }
Developer: neismit | Project: emds | Source: SVMFactory.cs


Example 14: EstimateXi

        public override double[][][] EstimateXi(IMLDataSet sequence,
                ForwardBackwardCalculator fbc, HiddenMarkovModel hmm)
        {
            if (sequence.Count <= 1)
            {
                throw new EncogError(
                        "Must have more than one observation");
            }

            double[][][] xi = EngineArray.AllocDouble3D((int)sequence.Count - 1, hmm
                    .StateCount, hmm.StateCount);

            for (int t = 0; t < (sequence.Count - 1); t++)
            {
                IMLDataPair observation = sequence[t+1];

                for (int i = 0; i < hmm.StateCount; i++)
                {
                    for (int j = 0; j < hmm.StateCount; j++)
                    {
                        xi[t][i][j] = fbc.AlphaElement(t, i)
                                * hmm.TransitionProbability[i][j]
                                * hmm.StateDistributions[j].Probability(
                                        observation) * fbc.BetaElement(t + 1, j);
                    }
                }
            }

            return xi;
        }
Developer: CreativelyMe | Project: encog-dotnet-core | Source: TrainBaumWelchScaled.cs


Example 15: Backpropagation

 public Backpropagation(IContainsFlat network, IMLDataSet training, double learnRate, double momentum)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkBackPropagation propagation = new TrainFlatNetworkBackPropagation(network.Flat, this.Training, learnRate, momentum);
     base.FlatTraining = propagation;
 }
Developer: neismit | Project: emds | Source: Backpropagation.cs


Example 16: Create

        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            double defaultGamma = 1.0d/((SupportVectorMachine) method).InputCount;
            double defaultC = 1.0d;

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gamma = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                            false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine) method, training);
            result.Gamma = gamma;
            result.C = c;
            return result;
        }
Developer: johannsutherland | Project: encog-dotnet-core | Source: SVMFactory.cs
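
Used directly rather than through the factory, the SVM trainer from this example can be driven as in the sketch below. The SupportVectorMachine constructor and the namespaces are assumptions based on Encog 3.x; the SVMTrain constructor and its Gamma and C properties, as well as the default values, come from the example above.

    using Encog.ML.Data;
    using Encog.ML.SVM;
    using Encog.ML.SVM.Training;

    public static class SvmTrainingSketch
    {
        // 'trainingSet' is any IMLDataSet whose ideal values match the SVM's single output.
        public static SupportVectorMachine Train(IMLDataSet trainingSet)
        {
            // false = classification; true would request regression (assumed constructor).
            var svm = new SupportVectorMachine(trainingSet.InputSize, false);

            // Same defaults the factory computes: gamma = 1 / inputCount, C = 1.
            var train = new SVMTrain(svm, trainingSet)
            {
                Gamma = 1.0 / trainingSet.InputSize,
                C = 1.0
            };

            // SVMTrain typically completes the fit in a single Iteration() call.
            train.Iteration();
            return svm;
        }
    }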


Example 17: InputFieldMLDataSet

 /// <summary>
 /// Construct an input field based on a NeuralDataSet.
 /// </summary>
 /// <param name="usedForNetworkInput">Is this field used for neural input.</param>
 /// <param name="data">The data set to use.</param>
 /// <param name="offset">The input or ideal index to use. This treats the input 
 /// and ideal as one long array, concatenated together.</param>
 public InputFieldMLDataSet(bool usedForNetworkInput,
                                IMLDataSet data, int offset)
 {
     _data = data;
     _offset = offset;
     UsedForNetworkInput = usedForNetworkInput;
 }
Developer: johannsutherland | Project: encog-dotnet-core | Source: InputFieldMLDataSet.cs


Example 18: Create

        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
                {
                    IMLMethod result = (IMLMethod) ObjectCloner.DeepCopy(method);
                    ((IMLResettable) result).Reset();
                    return result;
                }, score, populationSize);

            return train;
        }
Developer: johannsutherland | Project: encog-dotnet-core | Source: GeneticFactory.cs


Example 19: Create

        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork) method, score, startTemp, stopTemp, cycles);

            return train;
        }
Developer: OperatorOverload | Project: encog-cs | Source: AnnealFactory.cs
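
A hand-rolled version of the annealing setup above might look like the following sketch, using the same defaults the factory falls back to (start temperature 10, stop temperature 2, 100 cycles). The EncogUtility.SimpleFeedForward helper and the namespaces are Encog 3.x assumptions; the NeuralSimulatedAnnealing constructor and TrainingSetScore come from the example itself.

    using System;
    using Encog.ML.Data;
    using Encog.Neural.Networks;
    using Encog.Neural.Networks.Training;
    using Encog.Neural.Networks.Training.Anneal;
    using Encog.Util.Simple;

    public static class AnnealingSketch
    {
        // 'trainingSet' could be the XOR IMLDataSet built in the first sketch.
        public static void Train(IMLDataSet trainingSet)
        {
            BasicNetwork network = EncogUtility.SimpleFeedForward(
                trainingSet.InputSize, 4, 0, trainingSet.IdealSize, false);

            // Score candidate weight sets by their error on the training data.
            var score = new TrainingSetScore(trainingSet);

            // Factory defaults: start temp 10, stop temp 2, 100 cycles per iteration.
            var train = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            for (int i = 1; i <= 20; i++)
            {
                train.Iteration();
                Console.WriteLine("Iteration {0}, score {1}", i, train.Error);
            }
        }
    }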


Example 20: SaveTraining

 /// <summary>
 /// Saves an IMLDataSet to a file.
 /// </summary>
 /// <param name="directory">The directory.</param>
 /// <param name="file">The file.</param>
 /// <param name="trainintoSave">The training set to save.</param>
  public static void SaveTraining(string directory, string file, IMLDataSet trainintoSave)
  {
      FileInfo networkFile = FileUtil.CombinePath(new FileInfo(directory), file);
      // Save the training data in Encog's binary EGB format.
      EncogUtility.SaveEGB(networkFile, trainintoSave);
  }
Developer: tonyc2a | Project: encog-dotnet-core | Source: NetworkUtility.cs
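
A save-and-reload round trip with the helper above might look like the sketch below. EncogUtility.SaveEGB and FileUtil.CombinePath come from the example itself; the LoadEGB2Memory helper and the Encog.Util.File namespace are assumptions based on Encog 3.x.

    using System.IO;
    using Encog.ML.Data;
    using Encog.Util.File;
    using Encog.Util.Simple;

    public static class TrainingPersistenceSketch
    {
        public static IMLDataSet SaveAndReload(string directory, string file, IMLDataSet trainingSet)
        {
            FileInfo egbFile = FileUtil.CombinePath(new FileInfo(directory), file);

            // Persist the data set in Encog's binary EGB format.
            EncogUtility.SaveEGB(egbFile, trainingSet);

            // Read it back into memory (assumed helper, mirroring the Java API).
            return EncogUtility.LoadEGB2Memory(egbFile);
        }
    }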



Note: The IMLDataSet examples in this article were collected from source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective developers, and copyright remains with the original authors. Consult each project's license before redistributing or reusing the code; do not reproduce without permission.

