
C# IActivationFunction Code Examples

This article collects typical usage examples of IActivationFunction in C#. If you are wondering what IActivationFunction is for, or how to use it in your own code, the selected examples below should help.



IActivationFunction is the activation-function abstraction used by several C# neural-network libraries (the examples below are drawn from AForge.NET, Encog, SharpNEAT and projects derived from them). A total of 20 code examples are shown, sorted by popularity by default.
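Before working through the examples, it helps to keep in mind what an activation-function abstraction usually looks like: a forward mapping from a neuron's weighted input sum to its output, plus a derivative used by gradient-based training. The exact IActivationFunction contract differs between the libraries represented below (AForge.NET works on single values, Encog on whole arrays), so the following is only a minimal, self-contained sketch with illustrative member names, not any particular library's interface.

 // Illustrative sketch only: the interface members here are hypothetical and do not
 // reproduce the exact IActivationFunction signature of AForge.NET, Encog or SharpNEAT.
 using System;

 public interface IActivationFunction
 {
     double Activate(double x);      // maps the neuron's weighted input sum to its output
     double Derivative(double x);    // needed by backpropagation-style training
 }

 public sealed class SigmoidActivation : IActivationFunction
 {
     public double Activate(double x) => 1.0 / (1.0 + Math.Exp(-x));

     public double Derivative(double x)
     {
         double y = Activate(x);
         return y * (1.0 - y);       // sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
     }
 }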

Example 1: NormalizeOneDesiredOutputInPlace

        /// <summary>
        ///   Normalizes the desired output in place.
        /// </summary>
        /// <param name = "function">Activation function in use</param>
        /// <param name = "output">Output array to normalize</param>
        /// <returns>The normalized output</returns>
        public static double[] NormalizeOneDesiredOutputInPlace(IActivationFunction function, double[] output)
        {
            if (function is ActivationSigmoid)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0 ? 0.8 : 0.2);
                }
            }
            else if (function is ActivationTANH)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0.5 ? 0.5 : -0.5);
                }
            }
            else if (function is ActivationLinear)
            {
                /*do nothing*/
            }
            else
            {
                throw new ArgumentException("Unknown activation function");
            }

            return output;
        }
Author: jorik041 | Project: soundfingerprinting | Lines: 33 | Source: NormalizeUtils.cs


Example 2: NormalizeDesiredInputInPlace

 /// <summary>
 ///   Normalizes the desired input in place.
 /// </summary>
 /// <param name = "function">Activation function in use</param>
 /// <param name = "input">Input array to normalize</param>
 /// <returns>Reference to the normalized input</returns>
 public static double[] NormalizeDesiredInputInPlace(IActivationFunction function, double[] input)
 {
     if (function is ActivationTANH)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? -0.8f : 0.8f));
         }
     }
     else if (function is ActivationSigmoid)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? 0.2f : 0.8f));
         }
     }
     else if (function is ActivationLinear)
     {
         /*do nothing*/
     }
     else
     {
         throw new ArgumentException("Unknown activation function");
     }
     return input;
 }
Author: jorik041 | Project: soundfingerprinting | Lines: 32 | Source: NormalizeUtils.cs
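The two helpers above squeeze training targets and inputs into the working range of the chosen activation function (roughly (0, 1) for the sigmoid, (-1, 1) for TANH). A hedged usage sketch, assuming Encog's ActivationSigmoid and ActivationTANH plus the NormalizeUtils class from the soundfingerprinting project are referenced; the sample values are made up:

 // Sketch only: assumes Encog activation types and the NormalizeUtils class shown above.
 double[] desiredOutput = { -1.0, 0.5, 2.0 };
 double[] desiredInput  = { -3.0, 0.0, 7.0 };

 // Sigmoid targets collapse to 0.2 / 0.8, safely inside the (0, 1) output range.
 NormalizeUtils.NormalizeOneDesiredOutputInPlace(new ActivationSigmoid(), desiredOutput);

 // TANH inputs collapse to -0.8 / 0.0 / 0.8, safely inside the (-1, 1) range.
 NormalizeUtils.NormalizeDesiredInputInPlace(new ActivationTANH(), desiredInput);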


Example 3: Conv2DLayer

 /// <summary>
 ///     Construct a 2D convolution layer.
 /// </summary>
 /// <param name="theActivation">The activation function.</param>
 /// <param name="theNumFilters">The number of filters.</param>
 /// <param name="theFilterRows">The rows in each filter.</param>
 /// <param name="theFilterColumns">The columns in each filter.</param>
 public Conv2DLayer(IActivationFunction theActivation, int theNumFilters, int theFilterRows, int theFilterColumns)
 {
     Activation = theActivation;
     FilterRows = theFilterRows;
     FilterColumns = theFilterColumns;
     _numFilters = theNumFilters;
 }
Author: legendvijay | Project: aifh | Lines: 14 | Source: Conv2DLayer.cs


Example 4: SetActivationFunction

		/// <summary>
		/// Set new activation function for all neurons of the layer.
		/// </summary>
		/// 
		/// <param name="function">Activation function to set.</param>
		/// 
		/// <remarks><para>The method sets a new activation function for each neuron by setting
		/// its <see cref="ActivationNeuron.ActivationFunction"/> property.</para></remarks>
		/// 
		public void SetActivationFunction( IActivationFunction function )
		{
			for ( int i = 0; i < neurons.Length; i++ )
			{
				( (ActivationNeuron) neurons[i] ).ActivationFunction = function;
			}
		}
Author: holisticware-admin | Project: MonoVersal.AForgeNET | Lines: 16 | Source: ActivationLayer.cs
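A short usage sketch for the method above, assuming the AForge.NET types ActivationLayer, SigmoidFunction and BipolarSigmoidFunction are referenced: a layer built with one activation function can later be switched to another without recreating its neurons.

 // Sketch only: assumes AForge.NET's ActivationLayer, SigmoidFunction and BipolarSigmoidFunction.
 var layer = new ActivationLayer(neuronsCount: 5, inputsCount: 3, function: new SigmoidFunction());

 // Replace the activation function of every neuron in the layer with a single call.
 layer.SetActivationFunction(new BipolarSigmoidFunction());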


Example 5: FlatLayer

 public FlatLayer(IActivationFunction activation, int count, double biasActivation)
 {
     this.Activation = activation;
     this._x10f4d88af727adbc = count;          // neuron count (cf. _count in Example 19)
     this._x25922738b86264c8 = biasActivation; // bias activation (cf. _biasActivation in Example 19)
     this._x4d51c0aa16352a14 = null;           // context-fed-by layer (cf. _contextFedBy in Example 19)
 }
Author: neismit | Project: emds | Lines: 7 | Source: FlatLayer.cs


Example 6: FastCyclicNetwork

        /// <summary>
        /// Constructs a FastCyclicNetwork with the provided pre-built FastConnection array and 
        /// associated data.
        /// </summary>
        public FastCyclicNetwork(FastConnection[] connectionArray,
                                 IActivationFunction[] neuronActivationFnArray,
                                 double[][] neuronAuxArgsArray,
                                 int neuronCount,
                                 int inputNeuronCount,
                                 int outputNeuronCount,
                                 int timestepsPerActivation)
        {
            _connectionArray = connectionArray;
            _neuronActivationFnArray = neuronActivationFnArray;
            _neuronAuxArgsArray = neuronAuxArgsArray;

            // Create neuron pre- and post-activation signal arrays.
            _preActivationArray = new double[neuronCount];
            _postActivationArray = new double[neuronCount];

            // Wrap sub-ranges of the neuron signal arrays as input and output arrays for IBlackBox.
            // Offset is 1 to skip bias neuron (The value at index 1 is the first black box input).
            _inputSignalArrayWrapper = new SignalArray(_postActivationArray, 1, inputNeuronCount);

            // Offset to skip bias and input neurons. Output neurons follow input neurons in the arrays.
            _outputSignalArrayWrapper = new SignalArray(_postActivationArray, inputNeuronCount+1, outputNeuronCount);

            // Store counts for use during activation.
            _inputNeuronCount = inputNeuronCount;
            _inputAndBiasNeuronCount = inputNeuronCount+1;
            _outputNeuronCount = outputNeuronCount;
            _timestepsPerActivation = timestepsPerActivation;

            // Initialise the bias neuron's fixed output value.
            _postActivationArray[0] = 1.0;
        }
Author: jbrant | Project: SharpBackpropNeat | Lines: 36 | Source: FastCyclicNetwork.cs


Example 7: Neuron

 public Neuron(IActivationFunction activationFunc, double charge)
 {
     Charge = charge;
     Error = 0;
     Activation = activationFunc;
     In = new Dictionary<Neuron, double>();
 }
Author: KineticCookie | Project: FuzzyDev | Lines: 7 | Source: NeuralNetwork.cs


Example 8: NeatActivationFunctionLibrary

 /// <summary>
 /// Construct with a single IActivationFunction.
 /// </summary>
 /// <param name="activationFn"></param>
 public NeatActivationFunctionLibrary(IActivationFunction activationFn)
 {
     _activationFn = activationFn;
     _activationFnInfo = new ActivationFunctionInfo(0, 1.0, activationFn);
     _activationFnInfoList = new List<ActivationFunctionInfo>(1);
     _activationFnInfoList.Add(_activationFnInfo);
 }
Author: BLueders | Project: SharpNeat_Playground | Lines: 11 | Source: NeatActivationFunctionLibrary.cs


Example 9: NEATNeuronGene

 /// <summary>
 /// Construct a neuron gene.
 /// </summary>
 /// <param name="type">The neuron type.</param>
 /// <param name="theActivationFunction">The activation function.</param>
 /// <param name="id">The neuron id.</param>
 /// <param name="innovationId">The innovation id.</param>
 public NEATNeuronGene(NEATNeuronType type, IActivationFunction theActivationFunction, long id, long innovationId)
 {
     NeuronType = type;
     InnovationId = innovationId;
     Id = id;
     ActivationFunction = theActivationFunction;
 }
Author: jongh0 | Project: MTree | Lines: 14 | Source: NEATNeuronGene.cs
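A minimal construction sketch for the gene above, assuming Encog's NEATNeuronType enum and ActivationSigmoid class are referenced; the id and innovation id are arbitrary illustration values.

 // Sketch only: assumes Encog's NEATNeuronType and ActivationSigmoid; the ids are made up.
 var hiddenGene = new NEATNeuronGene(
     NEATNeuronType.Hidden,        // the neuron type this gene encodes
     new ActivationSigmoid(),      // activation function carried by the gene
     id: 42,
     innovationId: 7);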


Example 10: DecodeToConcurrentNetwork

		static public INetwork DecodeToConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
		{
		//----- Loop the neuronGenes. Create Neuron for each one.
			// Store a table of neurons keyed by their id.
			Hashtable neuronTable = new Hashtable(g.NeuronGeneList.Count);
			NeuronList neuronList = new NeuronList();

			foreach(NeuronGene neuronGene in g.NeuronGeneList)
			{
				Neuron newNeuron = new Neuron(activationFn, neuronGene.NeuronType, neuronGene.InnovationId);
				neuronTable.Add(newNeuron.Id, newNeuron);
				neuronList.Add(newNeuron);
			}

		//----- Loop the connection genes. Create a Connection for each one and bind them to the relevant Neurons.
			foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
			{
				Connection newConnection = new Connection(connectionGene.SourceNeuronId, connectionGene.TargetNeuronId, connectionGene.Weight);

				// Bind the connection to its source neuron.
				newConnection.SetSourceNeuron((Neuron)neuronTable[connectionGene.SourceNeuronId]);

				// Store the new connection against its target neuron.
				((Neuron)(neuronTable[connectionGene.TargetNeuronId])).ConnectionList.Add(newConnection);
			}

			return new ConcurrentNetwork(neuronList);
		}
Author: zaheeroz | Project: qd-maze-simulator | Lines: 28 | Source: GenomeDecoder.cs


Example 11: Write

        /// <param name="activationFn">Not strictly part of a genome. But it is useful to document which function
        /// the genome is supposed to run against when decoded into a network.</param>
        public static void Write(XmlNode parentNode, NeatGenome genome, IActivationFunction activationFn)
        {
            //----- Start writing. Create document root node.
            XmlElement xmlGenome = XmlUtilities.AddElement(parentNode, "genome");
            XmlUtilities.AddAttribute(xmlGenome, "id", genome.GenomeId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "species-id", genome.SpeciesId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "age", genome.GenomeAge.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "fitness", genome.Fitness.ToString("0.00"));
            XmlUtilities.AddAttribute(xmlGenome, "activation-fn-id", activationFn.FunctionId);

            //----- Write neurons.
            XmlElement xmlNeurons = XmlUtilities.AddElement(xmlGenome, "neurons");
            foreach(NeuronGene neuronGene in genome.NeuronGeneList)
                WriteNeuron(xmlNeurons, neuronGene);

            //----- Write modules.
            XmlElement xmlModules = XmlUtilities.AddElement(xmlGenome, "modules");
            foreach (ModuleGene moduleGene in genome.ModuleGeneList)
                WriteModule(xmlModules, moduleGene);

            //----- Write Connections.
            XmlElement xmlConnections = XmlUtilities.AddElement(xmlGenome, "connections");
            foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
                WriteConnectionGene(xmlConnections, connectionGene);

            //----- Write behavior.
            if(genome.Behavior!=null)
            {
                if(genome.Behavior.behaviorList!=null)
                {
                    XmlElement xmlBehavior = XmlUtilities.AddElement(xmlGenome, "behavior");
                    WriteBehavior(xmlBehavior,genome.Behavior);
                }
            }
        }
Author: OptimusLime | Project: IESoR | Lines: 37 | Source: XmlGenomeWriterStatic.cs


Example 12: DecodeToFastConcurrentMultiplicativeNetwork

        public static FastConcurrentMultiplicativeNetwork DecodeToFastConcurrentMultiplicativeNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
        {
            int outputNeuronCount = g.OutputNeuronCount;
            int neuronGeneCount = g.NeuronGeneList.Count;

            // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
            // any reason to ever have more than one bias node - although there may be 0.
            int neuronGeneIdx=0;
            for(; neuronGeneIdx<neuronGeneCount; neuronGeneIdx++)
            {
                if(g.NeuronGeneList[neuronGeneIdx].NeuronType != NeuronType.Bias)
                    break;
            }
            int biasNodeCount = neuronGeneIdx;
            int inputNeuronCount = g.InputNeuronCount;

            // ConnectionGenes point to a neuron ID. We need to map this ID to a 0 based index for
            // efficiency. To do this we build a table of indexes (ints) keyed on neuron ID.
            // TODO: An alternative here would be to forgo the building of a table and do a binary
            // search directly on the NeuronGeneList - probably a good idea to use a heuristic based upon
            // neuroncount*connectioncount that decides on which technique to use. Small networks will
            // likely be faster to decode using the binary search.

            // Actually we can partly achieve the above optimization by using HybridDictionary instead of Hashtable.
            // Although creating a table is a bit expensive.
            HybridDictionary neuronIndexTable = new HybridDictionary(neuronGeneCount);
            for(int i=0; i<neuronGeneCount; i++)
                neuronIndexTable.Add(g.NeuronGeneList[i].InnovationId, i);

            // Count how many of the connections are actually enabled. TODO: make faster - store disable count?
            int connectionGeneCount = g.ConnectionGeneList.Count;
            int connectionCount=connectionGeneCount;
            //			for(int i=0; i<connectionGeneCount; i++)
            //			{
            //				if(g.ConnectionGeneList[i].Enabled)
            //					connectionCount++;
            //			}

            // Now we can build the connection array(s).
            FloatFastConnection[] connectionArray = new FloatFastConnection[connectionCount];
            int connectionIdx=0;
            for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionIdx];
                connectionArray[connectionIdx].sourceNeuronIdx = (int)neuronIndexTable[connectionGene.SourceNeuronId];
                connectionArray[connectionIdx].targetNeuronIdx = (int)neuronIndexTable[connectionGene.TargetNeuronId];
                connectionArray[connectionIdx].weight = (float)connectionGene.Weight;
                connectionIdx++;
            }

            // Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
            // TODO: custom sort routine to prevent boxing/unboxing required by Array.Sort(ValueType[])
            //Array.Sort(connectionArray, fastConnectionComparer);
            QuickSortFastConnections(0, fastConnectionArray.Length-1);

            return new FastConcurrentMultiplicativeNetwork(
                biasNodeCount, inputNeuronCount,
                outputNeuronCount, neuronGeneCount,
                connectionArray, activationFn);
        }
Author: jtglaze | Project: IndependentWork2013 | Lines: 60 | Source: GenomeDecoder.cs


Example 13: CalculateGradient

 /// <summary>
 /// Not used for this type of plugin.
 /// </summary>
 ///
 /// <param name="gradients">Not used.</param>
 /// <param name="layerOutput">Not used.</param>
 /// <param name="weights">Not used.</param>
 /// <param name="layerDelta">Not used.</param>
 /// <param name="af">Not used.</param>
 /// <param name="index">Not used.</param>
 /// <param name="fromLayerIndex">Not used.</param>
 /// <param name="fromLayerSize">Not used.</param>
 /// <param name="toLayerIndex">Not used.</param>
 /// <param name="toLayerSize">Not used.</param>
 public void CalculateGradient(double[] gradients,
                               double[] layerOutput, double[] weights,
                               double[] layerDelta, IActivationFunction af,
                               int index, int fromLayerIndex, int fromLayerSize,
                               int toLayerIndex, int toLayerSize)
 {
 }
Author: encog | Project: encog-silverlight-core | Lines: 21 | Source: SystemLoggingPlugin.cs


Example 14: FloatFastConcurrentNetwork

		public FloatFastConcurrentNetwork(	int biasNeuronCount, 
										int inputNeuronCount,
                                        int outputNeuronCount,
                                        int outputsPerPolicy, // Schrum: Added
										int totalNeuronCount,
										FloatFastConnection[] connectionArray, 
										IActivationFunction[] activationFnArray)
		{
			this.biasNeuronCount = biasNeuronCount;
			this.inputNeuronCount = inputNeuronCount;
			this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
            this.outputNeuronCount = outputNeuronCount;
            this.outputsPerPolicy = outputsPerPolicy; // Schrum: Added

			this.connectionArray = connectionArray;
			this.activationFnArray = activationFnArray;
			
			//----- Allocate the arrays that make up the neural network.
			// The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
			neuronSignalArray = new float[totalNeuronCount];
			_neuronSignalArray = new float[totalNeuronCount];

			for(int i=0; i<biasNeuronCount; i++)
				neuronSignalArray[i] = 1.0F;
		}
Author: val1kus | Project: agent_multimodal | Lines: 25 | Source: FloatFastConcurrentNetwork.cs


Example 15: ActivationLayer

		/// <summary>
		/// Initializes a new instance of the <see cref="ActivationLayer"/> class.
		/// </summary>
		/// 
		/// <param name="neuronsCount">Layer's neurons count.</param>
		/// <param name="inputsCount">Layer's inputs count.</param>
		/// <param name="function">Activation function of neurons of the layer.</param>
		/// 
		/// <remarks>The new layer is randomized (see <see cref="ActivationNeuron.Randomize"/>
		/// method) after it is created.</remarks>
		/// 
		public ActivationLayer( int neuronsCount, int inputsCount, IActivationFunction function )
			: base( neuronsCount, inputsCount )
		{
			// create each neuron
			for ( int i = 0; i < neurons.Length; i++ )
				neurons[i] = new ActivationNeuron( inputsCount, function );
		}
Author: holisticware-admin | Project: MonoVersal.AForgeNET | Lines: 18 | Source: ActivationLayer.cs


Example 16: SetActivationFunction

 /// <summary>
 /// Set new activation function for all neurons of the network.
 /// </summary>
 /// 
 /// <param name="function">Activation function to set.</param>
 /// 
 /// <remarks><para>The method sets a new activation function for all neurons by calling the
 /// <see cref="ActivationLayer.SetActivationFunction"/> method for each layer of the network.</para></remarks>
 /// 
 public void SetActivationFunction( IActivationFunction function )
 {
     for ( int i = 0; i < layers.Length; i++ )
     {
         ( (ActivationLayer) layers[i] ).SetActivationFunction( function );
     }
 }
Author: holisticware-admin | Project: MonoVersal.AForgeNET | Lines: 16 | Source: ActivationNetwork.cs
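A usage sketch, assuming AForge.NET's ActivationNetwork, SigmoidFunction and BipolarSigmoidFunction are referenced: the network receives its activation function at construction time, and the method above swaps it for every neuron afterwards.

 // Sketch only: assumes AForge.NET types. Network shape: 2 inputs, a hidden layer of 4, 1 output.
 var network = new ActivationNetwork(new SigmoidFunction(), 2, 4, 1);

 // Switch every neuron in every layer to a bipolar sigmoid with one call.
 network.SetActivationFunction(new BipolarSigmoidFunction());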


Example 17: ActivationNeuron

        public ActivationNeuron(double bias, IActivationFunction activationFunction)
        {
            Contract.Requires(activationFunction != null);

            ActivationFunction = activationFunction;
            Bias = bias;
        }
Author: nagyistoce | Project: Neuroflow | Lines: 7 | Source: ActivationNeuron.cs


Example 18: Substrate

        public Substrate(uint input, uint output, uint hidden, IActivationFunction function)
        {
            weightRange = HyperNEATParameters.weightRange;
            threshold = HyperNEATParameters.threshold;

            inputCount = input;
            outputCount = output;
            hiddenCount = hidden;
            activationFunction = function;

            inputDelta = 2.0f / (inputCount);
            if (hiddenCount != 0)
                hiddenDelta = 2.0f / (hiddenCount);
            else
                hiddenDelta = 0;
            outputDelta = 2.0f / (outputCount);

            //SharpNEAT requires that the neuronlist be input|bias|output|hidden
            neurons=new NeuronGeneList((int)(inputCount + outputCount+ hiddenCount));
            //setup the inputs
            for (uint a = 0; a < inputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, activationFunction));
            }

            //setup the outputs
            for (uint a = 0; a < outputCount; a++)
            {
                neurons.Add(new NeuronGene(a + inputCount, NeuronType.Output, activationFunction));
            }
            for (uint a = 0; a < hiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + inputCount+outputCount, NeuronType.Hidden, activationFunction));
            }
        }
Author: coastwise | Project: HyperSharpNEAT | Lines: 35 | Source: Substrate.cs


Example 19: FlatLayer

 /// <summary>
 /// Construct a flat layer.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation.</param>
 public FlatLayer(IActivationFunction activation, int count,
     double biasActivation)
 {
     Activation = activation;
     _count = count;
     _biasActivation = biasActivation;
     _contextFedBy = null;
 }
Author: CreativelyMe | Project: encog-dotnet-core | Lines: 15 | Source: FlatLayer.cs
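A construction sketch using only the constructor shown above, assuming Encog's ActivationTANH class is referenced; the layer size is illustrative.

 // Sketch only: assumes Encog's ActivationTANH. A hidden layer of 25 TANH neurons
 // with the conventional bias activation of 1.0.
 var hidden = new FlatLayer(new ActivationTANH(), 25, 1.0);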


Example 20: ActivationFunctionInfo

 /// <summary>
 /// Construct with the provided id, selection probability and activation function.
 /// </summary>
 public ActivationFunctionInfo(int id, 
                               double selectionProbability,
                               IActivationFunction activationFn)
 {
     _id = id;
     _selectionProbability = selectionProbability;
     _activationFn = activationFn;
 }
Author: BLueders | Project: SharpNeat_Playground | Lines: 11 | Source: ActivationFunctionInfo.cs
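A hedged sketch of how the id/probability pairing above can be used: a list of entries whose selection probabilities sum to 1.0 lets evolution pick among several activation functions with different likelihoods (Example 8 shows the single-function case). The functions themselves are passed in as parameters here because the concrete SharpNEAT implementations vary between builds.

 // Sketch only: fnA and fnB stand for any two IActivationFunction implementations
 // available in the SharpNEAT build being used.
 using System.Collections.Generic;

 static IList<ActivationFunctionInfo> BuildFnInfoList(IActivationFunction fnA, IActivationFunction fnB)
 {
     return new List<ActivationFunctionInfo>
     {
         new ActivationFunctionInfo(0, 0.75, fnA),   // selected ~75% of the time
         new ActivationFunctionInfo(1, 0.25, fnB),   // selected ~25% of the time
     };
 }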



Note: The IActivationFunction examples in this article were collected from source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors; copyright remains with the original authors, and any redistribution or reuse should follow the corresponding project's license. Please do not republish without permission.

