• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

C# BasicNetwork类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了C#中BasicNetwork的典型用法代码示例。如果您正苦于以下问题:C# BasicNetwork类的具体用法?C# BasicNetwork怎么用?C# BasicNetwork使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



BasicNetwork类属于命名空间,在下文中一共展示了BasicNetwork类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C#代码示例。

示例1: NeuralGeneticAlgorithm

        /// <summary>
        /// Construct a neural genetic algorithm. Builds the initial population
        /// by cloning the template network once per population slot,
        /// randomizing each clone, scoring it, and finally sorting the
        /// population.
        /// </summary>
        ///
        /// <param name="network">The network to base this on.</param>
        /// <param name="randomizer">The randomizer used to create this initial population.</param>
        /// <param name="calculateScore">The score calculation object.</param>
        /// <param name="populationSize">The population size.</param>
        /// <param name="mutationPercent">The percent of offspring to mutate.</param>
        /// <param name="percentToMate">The percent of the population allowed to mate.</param>
        public NeuralGeneticAlgorithm(BasicNetwork network,
            IRandomizer randomizer, ICalculateScore calculateScore,
            int populationSize, double mutationPercent,
            double percentToMate)
            : base(TrainingImplementationType.Iterative)
        {
            Genetic = new NeuralGeneticAlgorithmHelper
                           {
                               CalculateScore = new GeneticScoreAdapter(calculateScore)
                           };
            IPopulation population = new BasicPopulation(populationSize);
            Genetic.MutationPercent = mutationPercent;
            // NOTE(review): the mating pool is set to twice the mating
            // percentage — confirm this matches the helper's expectations.
            Genetic.MatingPopulation = percentToMate*2;
            Genetic.PercentToMate = percentToMate;
            // Splice crossover cut at one third of the network's calculated
            // size (presumably the weight count — confirm CalculateSize()).
            Genetic.Crossover = new Splice(network.Structure.CalculateSize()/3);
            Genetic.Mutate = new MutatePerturb(4.0d);
            Genetic.Population = population;
            // Seed the population: each genome is an independently randomized
            // clone of the template network, scored up front.
            for (int i = 0; i < population.PopulationSize; i++)
            {
                var chromosomeNetwork = (BasicNetwork) (network
                                                           .Clone());
                randomizer.Randomize(chromosomeNetwork);

                var genome = new NeuralGenome(chromosomeNetwork) {GA = Genetic};
                Genetic.PerformCalculateScore(genome);
                Genetic.Population.Add(genome);
            }
            population.Sort();
        }
开发者ID:fxmozart,项目名称:encog-dotnet-core,代码行数:39,代码来源:NeuralGeneticAlgorithm.cs


示例2: ManhattanPropagation

 /// <summary>
 /// Create a Manhattan-update-rule propagation trainer.
 /// </summary>
 ///
 /// <param name="network">The network whose weights will be trained.</param>
 /// <param name="training">The data set to train against.</param>
 /// <param name="learnRate">The fixed step size applied to each weight update.</param>
 public ManhattanPropagation(BasicNetwork network,
     IMLDataSet training, double learnRate)
     : base(network, training)
 {
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
     _learningRate = learnRate;
 }
开发者ID:Romiko,项目名称:encog-dotnet-core,代码行数:14,代码来源:ManhattanPropagation.cs


示例3: NetworkToString

        /// <summary>
        /// Format the network as a human readable string listing each layer
        /// reached after the input layer as "H1=n,H2=m,...".
        /// </summary>
        /// <param name="network">The network to format.</param>
        /// <returns>A human readable string.</returns>
        public static String NetworkToString(BasicNetwork network)
        {
            StringBuilder text = new StringBuilder();
            String separator = "";
            int hiddenIndex = 1;

            ILayer current = network.GetLayer(BasicNetwork.TAG_INPUT);

            // Walk forward from the input layer, labeling every subsequent
            // layer in order.
            while (current.Next.Count > 0)
            {
                current = current.Next[0].ToLayer;

                text.Append(separator);
                text.Append("H");
                text.Append(hiddenIndex++);
                text.Append("=");
                text.Append(current.NeuronCount);
                separator = ",";
            }

            return text.ToString();
        }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:31,代码来源:PruneIncremental.cs


示例4: TrainHopfield

 /// <summary>
 /// Create a Hopfield network trainer.
 /// </summary>
 /// <param name="trainingSet">The patterns to train into the network.</param>
 /// <param name="network">The Hopfield network being trained.</param>
 public TrainHopfield(INeuralDataSet trainingSet,
          BasicNetwork network)
 {
     this.Error = 0;
     this.Training = trainingSet;
     this.network = network;
 }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:12,代码来源:TrainHopfield.cs


示例5: Randomize

 /// <summary>
 /// Randomize the network layer by layer, visiting every layer index
 /// except the last one.
 /// </summary>
 /// <param name="network">The network to randomize.</param>
 public override void Randomize(BasicNetwork network)
 {
     int lastLayer = network.Layers.Count - 1;
     for (int layerIndex = 0; layerIndex < lastLayer; layerIndex++)
     {
         RandomizeLayer(network, layerIndex);
     }
 }
开发者ID:legendvijay,项目名称:aifh,代码行数:8,代码来源:XaiverRandomizeNetwork.cs


示例6: CalculateDepth

 /// <summary>
 /// Construct the depth calculation object and immediately run the
 /// calculation starting from the network's output layer, if one exists.
 /// </summary>
 /// <param name="network">The network that we are calculating for.</param>
 public CalculateDepth(BasicNetwork network)
 {
     this.network = network;
     this.outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

     // A network without a tagged output layer is left uncalculated.
     if (this.outputLayer != null)
     {
         Calculate(0, this.outputLayer);
     }
 }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:11,代码来源:CalculateDepth.cs


示例7: RPROPJob

 /// <summary>
 /// Construct an RPROP job using the default initial update and default
 /// maximum step values. For more information on RPROP see the
 /// ResilientPropagation class.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="loadToMemory">True if binary training data should be loaded to memory.</param>
 /// <param name="localRatio">The local ratio, used if this job is performed by an OpenCL Device.</param>
 /// <param name="globalRatio">The global ratio, used if this job is performed by an OpenCL Device.
 ///            NOTE(review): declared as int while the other ratios are double — confirm this is intentional.</param>
 /// <param name="segmentationRatio">The segmentation ratio, used if this job is performed by an OpenCL Device.</param>
 /// <param name="iterationsPer">How many iterations to process per cycle.</param>
 public RPROPJob(BasicNetwork network, INeuralDataSet training,
         bool loadToMemory, double localRatio, int globalRatio, double segmentationRatio, int iterationsPer) :
     this(network, training,
          loadToMemory, RPROPConst.DEFAULT_INITIAL_UPDATE,
          RPROPConst.DEFAULT_MAX_STEP, localRatio, globalRatio, segmentationRatio, iterationsPer)
 {
 }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:18,代码来源:RPROPJob.cs


示例8: CrossTraining

 /// <summary>
 /// Construct a cross trainer that works over the folds of a folded
 /// data set.
 /// </summary>
 /// <param name="network">The network.</param>
 /// <param name="training">The folded training data.</param>
 public CrossTraining(BasicNetwork network,
          FoldedDataSet training)
 {
     this.folded = training;
     this.network = network;
     Training = training;
 }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:12,代码来源:CrossTraining.cs


示例9: Randomize

 /// <summary>
 /// Replace every weight in the network with a random value produced by
 /// Rnd.NextDouble over the configured (_low, _high) range.
 /// </summary>
 /// <param name="network">The network whose weights are replaced.</param>
 public override void Randomize(BasicNetwork network)
 {
     for (int w = 0; w < network.Weights.Length; w++)
     {
         network.Weights[w] = Rnd.NextDouble(_low, _high);
     }
 }
开发者ID:legendvijay,项目名称:aifh,代码行数:8,代码来源:RangeRandomizeNetwork.cs


示例10: SVDTraining

        /// <summary>
        /// Construct the SVD trainer.
        /// </summary>
        /// <param name="network">The network to train. Must be a radial basis
        /// function network with a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        public SVDTraining(BasicNetwork network, INeuralDataSet training)
        {
            ILayer output = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            // SVD training is only defined for RBF networks whose output
            // layer contains exactly one neuron; reject anything else.
            if (output == null)
            {
                throw new TrainingError("SVD requires an output layer.");
            }

            if (output.NeuronCount != 1)
            {
                throw new TrainingError("SVD requires an output layer with a single neuron.");
            }

            if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
            {
                throw new TrainingError("SVD is only tested to work on radial basis function networks.");
            }

            rbfLayer = (RadialBasisFunctionLayer) network.GetLayer(RadialBasisPattern.RBF_LAYER);

            this.Training = training;
            this.network = network;
            this.trainingLength = (int) this.Training.InputSize;

            // Reusable data pair sized to the training set's input/ideal widths.
            this.pair = new BasicNeuralDataPair(
                new BasicNeuralData(this.Training.InputSize),
                new BasicNeuralData(this.Training.IdealSize));
        }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:32,代码来源:SVDTraining.cs


示例11: Equals

        /// <summary>
        /// Determine if two neural networks are equal by comparing their
        /// encoded weight arrays element by element, truncated to a fixed
        /// number of decimal places.
        /// </summary>
        ///
        /// <param name="network1">The first network.</param>
        /// <param name="network2">The second network.</param>
        /// <param name="precision">How many decimal places to check.</param>
        /// <returns>True if the two networks are equal.</returns>
        public static bool Equals(BasicNetwork network1,
                                  BasicNetwork network2, int precision)
        {
            double[] weights1 = NetworkToArray(network1);
            double[] weights2 = NetworkToArray(network2);

            if (weights1.Length != weights2.Length)
            {
                return false;
            }

            // Scale factor that shifts the requested decimal places into the
            // integer part; precisions too large to represent are rejected.
            double scale = Math.Pow(10.0d, precision);
            if (Double.IsInfinity(scale) || (scale > Int64.MaxValue))
            {
                throw new NeuralNetworkError("Precision of " + precision
                                             + " decimal places is not supported.");
            }

            // Truncate both values to the requested precision, then compare.
            for (int i = 0; i < weights1.Length; i++)
            {
                if ((long) (weights1[i]*scale) != (long) (weights2[i]*scale))
                {
                    return false;
                }
            }

            return true;
        }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:38,代码来源:NetworkCODEC.cs


示例12: QuickPropagation

 /// <summary>
 /// Construct a QPROP trainer for flat networks. A learning rate of 2 is
 /// a good starting point; if training fails to converge, lower it. Like
 /// backprop, except QPROP can take higher learning rates.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.</param>
 public QuickPropagation(BasicNetwork network,
                         IMLDataSet training, double learnRate)
     : base(network, training)
 {
     // Fail fast if the data set is incompatible with the network.
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate = learnRate;
 }
开发者ID:neismit,项目名称:emds,代码行数:16,代码来源:QuickPropagation.cs


示例13: NetworkSize

        /// <summary>
        /// Determine the network size: the number of weight and bias values
        /// needed to encode the network.
        /// </summary>
        /// <param name="network">The network to check.</param>
        /// <returns>The size of the network.</returns>
        public static int NetworkSize(BasicNetwork network)
        {

            // Fast path: if an up-to-date flat network exists, its weight
            // array length is exactly the size we want.
            if (network.Structure.Flat != null
                && (network.Structure.FlatUpdate == FlatUpdateNeeded.None
                || network.Structure.FlatUpdate == FlatUpdateNeeded.Unflatten))
            {
                return network.Structure.Flat.Weights.Length;
            }

            int index = 0;

            // Slow path: loop over all of the layers, take the output layer first.
            foreach (ILayer layer in network.Structure.Layers)
            {

                // See if the previous layer, which is the next layer that the
                // loop will hit, is either a connection to a BasicLayer or a
                // ContextLayer.
                ISynapse synapse = network.Structure
                        .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                ISynapse contextSynapse = network.Structure.FindPreviousSynapseByLayerType(
                        layer, typeof(ContextLayer));

                // Get a list of the previous synapses to this layer.
                IList<ISynapse> list = network.Structure.GetPreviousSynapses(layer);

                // If there is neither a BasicLayer nor a ContextLayer as the
                // next layer, then just take the first synapse of any type.
                if (synapse == null && contextSynapse == null && list.Count > 0)
                {
                    synapse = list[0];
                }

                // Is there any data to record for this synapse?
                if (synapse != null && synapse.WeightMatrix != null)
                {
                    // Count, per target neuron: one weight per source neuron,
                    // plus one bias slot when the target layer has bias, plus
                    // the context synapse's inputs when present.
                    for (int x = 0; x < synapse.ToNeuronCount; x++)
                    {

                        index += synapse.FromNeuronCount;


                        if (synapse.ToLayer.HasBias)
                        {
                            index++;
                        }

                        if (contextSynapse != null)
                        {
                            index += contextSynapse.FromNeuronCount;
                        }
                    }
                }
            }

            return index;
        }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:64,代码来源:NetworkCODEC.cs


示例14: TrainInstar

 /// <summary>
 /// Construct the instar training object.
 /// </summary>
 /// <param name="network">The network to be trained.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learningRate">The learning rate.</param>
 public TrainInstar(BasicNetwork network, INeuralDataSet training,
         double learningRate)
 {
     this.learningRate = learningRate;
     this.training = training;
     this.network = network;
     // Locate the network parts used by CPN-style training.
     this.parts = new FindCPN(network);
 }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:14,代码来源:TrainInstar.cs


示例15: ScaledConjugateGradient

 /// <summary>
 /// Construct a scaled-conjugate-gradient trainer backed by the flat
 /// network training implementation.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public ScaledConjugateGradient(BasicNetwork network,
         INeuralDataSet training)
     : base(network, training)
 {
     this.FlatTraining = new TrainFlatNetworkSCG(
             network.Structure.Flat,
             this.Training);
 }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:14,代码来源:ScaledConjugateGradient.cs


示例16: QuickPropagation

 /// <summary>
 /// Construct a QPROP trainer for flat networks. A learning rate of 2 is
 /// a good starting point; if training fails to converge, lower it. Like
 /// backprop, except QPROP can take higher learning rates.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.</param>
 public QuickPropagation(BasicNetwork network,
     IMLDataSet training, double learnRate)
     : base(network, training)
 {
     // Fail fast if the data set is incompatible with the network.
     ValidateNetwork.ValidateMethodToData(network, training);
     OutputEpsilon = 1.0;
     LearningRate = learnRate;
     // One previous-delta slot per network weight.
     LastDelta = new double[Network.Flat.Weights.Length];
 }
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:18,代码来源:QuickPropagation.cs


示例17: BPROPJob

        /// <summary>
        /// Construct a job definition for backpropagation training. For more
        /// information on backprop, see the Backpropagation class.
        /// </summary>
        /// <param name="network">The network to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="loadToMemory">Should binary data be loaded to memory?</param>
        /// <param name="learningRate">The learning rate to use.</param>
        /// <param name="momentum">The momentum to use.</param>
        public BPROPJob(BasicNetwork network, INeuralDataSet training,
                 bool loadToMemory, double learningRate,
                 double momentum)
            : base(network, training, loadToMemory)
        {
            this.Momentum = momentum;
            this.LearningRate = learningRate;
        }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:18,代码来源:BPROPJob.cs


示例18: ArrayToNetwork

        /// <summary>
        /// Use an array of doubles to populate the memory of the neural
        /// network, layer by layer.
        /// </summary>
        /// <param name="array">An array of doubles.</param>
        /// <param name="network">The network to encode.</param>
        public static void ArrayToNetwork(double[] array,
                 BasicNetwork network)
        {
            int position = 0;

            // Each layer consumes a slice of the array; ProcessLayer returns
            // the updated array position for the next layer.
            foreach (ILayer layer in network.Structure.Layers)
            {
                position = NetworkCODEC.ProcessLayer(network, layer, array, position);
            }

            // Signal that the flat representation must be rebuilt.
            network.Structure.FlatUpdate = FlatUpdateNeeded.Flatten;
        }
开发者ID:OperatorOverload,项目名称:encog-cs,代码行数:17,代码来源:NetworkCODEC.cs


示例19: TrainAdaline

        /// <summary>
        /// Construct an ADALINE trainer. ADALINE networks have exactly two
        /// layers, so larger networks are rejected up front.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learningRate">The learning rate.</param>
        public TrainAdaline(BasicNetwork network, IMLDataSet training,
                            double learningRate) : base(TrainingImplementationType.Iterative)
        {
            if (network.LayerCount > 2)
            {
                throw new NeuralNetworkError(
                    "An ADALINE network only has two layers.");
            }

            _learningRate = learningRate;
            _training = training;
            _network = network;
        }
开发者ID:encog,项目名称:encog-silverlight-core,代码行数:20,代码来源:TrainAdaline.cs


示例20: BackPropagation

 /// <summary>
 ///     Construct the backpropagation trainer. Defaults: batch size 500,
 ///     Nesterov updates enabled, and a Mersenne Twister random source.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data to use.</param>
 /// <param name="theLearningRate">The learning rate.  Can be changed as training runs.</param>
 /// <param name="theMomentum">The momentum.  Can be changed as training runs.</param>
 public BackPropagation(BasicNetwork theNetwork, IList<BasicData> theTraining, double theLearningRate,
     double theMomentum)
 {
     _network = theNetwork;
     _training = theTraining;
     LearningRate = theLearningRate;
     Momentum = theMomentum;
     BatchSize = 500;
     NesterovUpdate = true;
     Stochastic = new MersenneTwisterGenerateRandom();
     // Gradients use cross-entropy error; keep one previous-delta slot
     // per weight for momentum/Nesterov updates.
     _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);
     _lastDelta = new double[theNetwork.Weights.Length];
 }
开发者ID:gyantal,项目名称:SQLab,代码行数:20,代码来源:BackPropagation.cs



注:本文中的BasicNetwork类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
C# BasicRequest类代码示例发布时间:2022-05-24
下一篇:
C# BasicMLDataSet类代码示例发布时间:2022-05-24
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap