
Python structure.FeedForwardNetwork Class Code Examples


This article collects typical usage examples of the pybrain.structure.FeedForwardNetwork class in Python. If you are wondering what the FeedForwardNetwork class does, how to use it, or what working examples look like, the selected class code examples below may help.



Below are 20 code examples of the FeedForwardNetwork class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
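
Before working through the numbered examples, the snippet below is a minimal, self-contained sketch of the pattern they all share: create a FeedForwardNetwork, register its layers, wire them with FullConnection objects, call sortModules(), then train and activate the network. The layer sizes, the XOR toy dataset and the trainer settings are illustrative choices, not taken from any of the cited projects.

# A minimal, self-contained sketch of the FeedForwardNetwork workflow (layer sizes,
# the XOR dataset and the trainer settings are illustrative choices only).
from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = FeedForwardNetwork()

# 1. Create the layers and register them with the network.
inLayer = LinearLayer(2, name='in')
hiddenLayer = SigmoidLayer(3, name='hidden')
outLayer = LinearLayer(1, name='out')
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)

# 2. Wire the layers together with full connections.
net.addConnection(FullConnection(inLayer, hiddenLayer))
net.addConnection(FullConnection(hiddenLayer, outLayer))

# 3. sortModules() finalizes the topology and must be called before activate().
net.sortModules()

# 4. Train with backpropagation on a small supervised dataset (XOR here).
ds = SupervisedDataSet(2, 1)
for sample, target in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(sample, target)
trainer = BackpropTrainer(net, ds, learningrate=0.01)
trainer.trainEpochs(100)

# 5. Run the trained network on a new input.
print(net.activate([0, 1]))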

Example 1: __init__

class MyNet:

	def __init__(self, file='config.xml'):
		self.net = FeedForwardNetwork()
		self.file = file


	def constructNet(self, input, hidden, output): 
		inputLayer = LinearLayer(input)
		hiddenLayer = TanhLayer(hidden)
		outputLayer = LinearLayer(output)

		self.net.addInputModule(inputLayer)
		self.net.addModule(hiddenLayer)
		self.net.addOutputModule(outputLayer)

		conn1 = FullConnection(inputLayer, hiddenLayer)
		conn2 = FullConnection(hiddenLayer, outputLayer)

		self.net.addConnection(conn1)
		self.net.addConnection(conn2)

	
	def setup(self):
		self.net.sortModules()

	
	def saveToFile(self,file='config.xml'):
		NetworkWriter.writeToFile(self.net, file)


	def loadFromFile(self, file='config.xml'):
		self.net = NetworkReader.readFrom(file)
Author: jszum, Project: TranslatorNN, Lines of code: 33, Source: network.py


Example 2: initMaxentNetwork

def initMaxentNetwork():
  """Builds a network with just a sigmoid output layer, i.e. a multi-class maximum entropy model."""
  fnn = FeedForwardNetwork()
  inLayer = LinearLayer(numFeatures)
  fnn.addInputModule(inLayer)
  outLayer = SigmoidLayer(3)
  fnn.addOutputModule(outLayer)
  fnn.addConnection(FullConnection(inLayer, outLayer))
  fnn.sortModules()
  return fnn
Author: acvogel, Project: discriminative-ibr, Lines of code: 10, Source: discrim_ibr.py
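
As a usage note for Example 2: the function reads a module-level numFeatures and returns an untrained network. A hedged sketch of how it might be trained and queried (assuming the function above and its pybrain imports are in the same script; the feature count and the toy sample are made-up values) could look like this.

# Hypothetical usage of initMaxentNetwork(); numFeatures and the sample are assumptions.
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

numFeatures = 5                              # assumed global read by initMaxentNetwork()
fnn = initMaxentNetwork()
ds = SupervisedDataSet(numFeatures, 3)       # three outputs, one per class
ds.addSample([1, 0, 0, 1, 0], [0, 1, 0])     # toy sample with a one-hot class target
trainer = BackpropTrainer(fnn, ds)
trainer.trainEpochs(10)
print(fnn.activate([1, 0, 0, 1, 0]))         # per-class sigmoid scores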


Example 3: __init__

  def __init__(self, num_input, num_hidden, num_output):
      # self.net = buildNetwork(num_input, num_hidden, num_output, bias = True)
    self.net = FeedForwardNetwork()

    self.num_input = num_input
    self.num_hidden = num_hidden
    self.num_output = num_output

    inLayer = LinearLayer(num_input, name='in')
    hiddenLayer1 = SigmoidLayer(num_hidden, name='hidden1')
    outLayer = LinearLayer(num_output, name='out')

    self.net.addInputModule(inLayer)
    self.net.addModule(hiddenLayer1)
    self.net.addOutputModule(outLayer)

    self.in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    self.hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    self.net.addConnection(self.in_to_hidden)
    self.net.addConnection(self.hidden_to_out)

    self.net.sortModules()

    self.dataset = None
Author: autekroy, Project: CS-275-Cooperative-Hunting-Simulation, Lines of code: 25, Source: NNW.py


Example 4: __init__

    def __init__(self, train_data, hyper,  n_targets=None, label_targets=None):
        """
    ------------

    train_data: pandas DataFrame
                Contains columns for features and for target variables. The names of the target variables ends
                with the suffix "_tau"
    hyper:      dictionary
                It contains the hyperparameters necessary to run all the functionalities of the model.
                 They are the following:
                "structure" is a list of integers determining the number of neurons in each hidden layer
                "epochs" an integer specifying the maximum number of epochs to run during every training session
                "learning_rate" a float giving the learning rate of the gradient descend
                "momentum" a float giving the value of the momentum for the algorithm
                "batch" a bool. If True the method performs full batch learning, i.e. updates of the weights is done
                using all the instances of the training set. Else, normal online method is performed
                Other parameters regarding cross validation are explained in the base class

        """
        Regression.__init__(self, train_data, hyper, n_targets=n_targets, label_targets=label_targets)

        self.N = FeedForwardNetwork()
        self.structure = [self.n_feature] + hyper['structure'] + [self.n_target]

        self._build_net(self.structure)
        self.res_params = [self.N.params[i] for i in range(len(self.N.params))]

        self.train_fraction = hyper['train_fraction']
        self.seed = hyper['seed']
        self.epochs = hyper['epochs']
        self.learning_rate = hyper['learning_rate']
        self.momentum = hyper['momentum']
        self.batch = bool(hyper['batch'])
Author: Ambrosys, Project: climatelearn, Lines of code: 33, Source: pybrain_MP.py
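
For reference, a hypothetical hyper dictionary consistent with the docstring in Example 4 (plus the train_fraction and seed keys that the constructor also reads) might look like the following; the values are purely illustrative and not taken from the cited project.

# Hypothetical `hyper` configuration; all values are illustrative.
hyper = {
    'structure': [10, 5],       # two hidden layers with 10 and 5 neurons
    'epochs': 200,              # maximum epochs per training session
    'learning_rate': 0.01,      # gradient-descent learning rate
    'momentum': 0.9,            # momentum term for backpropagation
    'batch': True,              # full-batch weight updates instead of online learning
    'train_fraction': 0.8,      # fraction of the data used for training
    'seed': 42,                 # random seed
}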


Example 5: __init__

	def __init__(self, genes=None):
		self.net = FeedForwardNetwork()

		inLayer = LinearLayer(Brain.G_INPUTNODES, name='input')
		hiddenLayer1 = SigmoidLayer(Brain.G_HIDDENNODES_L1, name='hidden1')
		hiddenLayer2 = SigmoidLayer(Brain.G_HIDDENNODES_L2, name='hidden2')
		outLayer = SigmoidLayer(Brain.G_OUTPUTNODES, name='out')
		bias = BiasUnit(name='bias')

		self.net.addInputModule(inLayer)
		self.net.addModule(hiddenLayer1)
		self.net.addModule(hiddenLayer2)
		self.net.addModule(bias)
		self.net.addOutputModule(outLayer)

		in_to_hidden1 = FullConnection(inLayer, hiddenLayer1)
		hidden1_to_hidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
		hidden2_to_out = FullConnection(hiddenLayer2, outLayer)
		bias_to_hidden1 = FullConnection(bias, hiddenLayer1)
		bias_to_hidden2 = FullConnection(bias, hiddenLayer2)
		bias_to_out = FullConnection(bias, outLayer)
		
		self.net.addConnection(in_to_hidden1)
		self.net.addConnection(hidden1_to_hidden2)
		self.net.addConnection(hidden2_to_out)
		self.net.addConnection(bias_to_hidden1)
		self.net.addConnection(bias_to_hidden2)
		self.net.addConnection(bias_to_out)

		self.net.sortModules()

		if genes != None:
			self.import_genes(genes)
Author: andy071001, Project: artificialbrains, Lines of code: 33, Source: brain2.py


Example 6: PrepareModel

	def PrepareModel(self, savedmodel = None):
		
		if savedmodel != None:
			self.trainer = savedmodel
		else:
			attributescount=len(self.traindata[0])
			self.ds = SupervisedDataSet(attributescount, 1)
			for i in range(len(self.traindata)):
				self.ds.appendLinked(self.traindata[i], self.trainlabel[i])
		
			self.net = FeedForwardNetwork()
			inLayer = LinearLayer(len(self.traindata[0]))
			self.net.addInputModule(inLayer)
			hiddenLayers=[]
			for i in range(self.hiddenlayerscount):
				hiddenLayer=SigmoidLayer(self.hiddenlayernodescount)
				hiddenLayers.append(hiddenLayer)
				self.net.addModule(hiddenLayer)
			outLayer = LinearLayer(1)
			self.net.addOutputModule(outLayer)
		
			layers_connections=[]
			layers_connections.append(FullConnection(inLayer, hiddenLayers[0]))
			for i in range(self.hiddenlayerscount-1):
				layers_connections.append(FullConnection(hiddenLayers[i], hiddenLayers[i+1]))
			layers_connections.append(FullConnection(hiddenLayers[-1], outLayer))
		
			for layers_connection in layers_connections:
				self.net.addConnection(layers_connection)
			self.net.sortModules()
			
			# train the network with backpropagation
			self.trainer = BackpropTrainer(self.net, self.ds)
			self.trainer.train()
Author: drossegger, Project: ml-ex1, Lines of code: 34, Source: neuralnetworkregression.py


Example 7: __init__

    def __init__(self, hidden_layers, data_index_size):

        self.network = FeedForwardNetwork()

        connect_queue = Queue.Queue()
        
        for layer in xrange(0, hidden_layers):
            connect_queue.put(TanhLayer(data_index_size, name = 'hidden_layer_{}'.format(layer)))

        connect_queue.put(SigmoidLayer(1, name = 'output_layer'))

        prev_layer = LinearLayer(data_index_size, name = 'input_layer')
        self.network.addInputModule(prev_layer)
        
        while not connect_queue.empty():
            print 'layer'
            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)

            bias = BiasUnit()
            bias_connection = FullConnection(bias, current_layer, name = "bias_to_{}_connection".format(current_layer.name))
            self.network.addModule(bias)
            self.network.addConnection(bias_connection)
            
            connection = FullConnection(prev_layer, current_layer, name = "{}_to_{}_connection".format(prev_layer.name, current_layer.name))
            self.network.addConnection(connection)
            
            prev_layer = current_layer

        print 'sorting....'
        self.network.sortModules()
Author: chazly321, Project: timbad, Lines of code: 34, Source: manager.py


Example 8: __init__

 def __init__(self, x, y, direction):
   self.age = 0
   # position
   self.x = x
   self.y = y
   # number of going back and forth for different foods
   self.backForth = 0
   self.LastFood = None # the last food animat ate
   # orientation (0 - 359 degrees)
   self.direction = direction
   # carrying food
   self.food = None
   # touching anything
   self.touching = None
   self.sees = None
   # hunger sensor
   self.fruit_hunger = 2000
   self.veggie_hunger = 2000
   self.avg_fruit_hunger = 0
   self.avg_veggie_hunger = 0
   # neural net
   self.net = FeedForwardNetwork()
   self.net.addInputModule(LinearLayer(12, name='in'))
   self.net.addModule(SigmoidLayer(13, name='hidden'))
   self.net.addOutputModule(LinearLayer(6, name='out'))
   self.net.addConnection(FullConnection(self.net['in'], self.net['hidden']))
   self.net.addConnection(FullConnection(self.net['hidden'], self.net['out']))
   self.net.sortModules()
   # thresholds for deciding an action
   self.move_threshold = 0
   self.pickup_threshold = 0
   self.putdown_threshold = 0
   self.eat_threshold = 0
Author: autekroy, Project: Import-Export-Animats, Lines of code: 33, Source: animats.py


Example 9: __init__

  def __init__(self, input_size, output_size, number_of_layers=3, size_of_hidden_layers=3, type_of_hidden_layer='sigmoid', net_bias=False, epochs=100):
    self.net = FeedForwardNetwork()
    self.num_epochs = epochs
    # set up layers of the network
    layers = []

    for i in range(number_of_layers):
      if i == 0:
        layers.append(LinearLayer(input_size))
        self.net.addInputModule(layers[i])
      elif i == (number_of_layers-1):
        layers.append(LinearLayer(output_size))
        self.net.addOutputModule(layers[i])
        self.net.addConnection(FullConnection(layers[i-1], layers[i]))
      else:
        if type_of_hidden_layer == 'linear':
          layers.append(LinearLayer((input_size + output_size) / 2))
        elif type_of_hidden_layer == 'sigmoid':
          layers.append(SigmoidLayer((input_size + output_size) / 2))
        elif type_of_hidden_layer == 'tanh':
          layers.append(TanhLayer((input_size + output_size) / 2))
        self.net.addModule(layers[i])
        self.net.addConnection(FullConnection(layers[i-1], layers[i]))

    self.net.sortModules()
    self.input_size = input_size
    self.output_size = output_size
Author: tlubbers, Project: ilstu-honors-neuralnetwork-src, Lines of code: 27, Source: honors_net.py


Example 10: __init__

    def __init__(self, grid_size, hidden_list):
        """Sets up the neural network.

        @param grid_size: the size of the grid, for specifying the input layer.
        @param hidden_list: a list containing the number of nodes in each hidden layer.
        """
        self.net = FeedForwardNetwork()

        in_layer = LinearLayer(grid_size*grid_size)
        self.net.addInputModule(in_layer)
        out_layer = LinearLayer(4)
        self.net.addOutputModule(out_layer)

        hidden_layers = []
        for i in hidden_list:
            hidden_layer = SigmoidLayer(i)
            hidden_layers.append(hidden_layer)
            self.net.addModule(hidden_layer)

        self.net.addConnection(FullConnection(in_layer, hidden_layers[0]))
        if len(hidden_layers) > 1:
            for i in range(len(hidden_layers) - 1):
                self.net.addConnection(FullConnection(hidden_layers[i], hidden_layers[i+1]))
        self.net.addConnection(FullConnection(hidden_layers[-1], out_layer))

        self.net.sortModules()
Author: jwrm2, Project: 2048, Lines of code: 26, Source: controller.py


Example 11: _new_1h_net

def _new_1h_net(window):
    net     = FeedForwardNetwork()
    inl     = SigmoidLayer(window*window*2+1)
    hidden1 = SigmoidLayer(window*window*2)
    outl    = SigmoidLayer(1)
    net.addInputModule(inl)
    net.addModule(hidden1)
    net.addOutputModule(outl)
    c1 = FullConnection(inl, hidden1)
    c2 = FullConnection(hidden1, outl)
    net.addConnection(c1)
    net.addConnection(c2)
    return net
Author: majek, Project: transfer, Lines of code: 13, Source: network.py


Example 12: simple_network

def simple_network(data, digit, train_ds, test_ds):
#     n = buildNetwork(train_ds.indim, 1, train_ds.outdim, outclass=SoftmaxLayer)
    n = FeedForwardNetwork()
    inLayer = LinearLayer(64)
    outLayer = SoftmaxLayer(10)
    n.addInputModule(inLayer)
    n.addOutputModule(outLayer)
    n.addConnection(FullConnection(inLayer, outLayer))
    n.sortModules()
    trainer = BackpropTrainer(n, dataset=train_ds, momentum=0.1, verbose=True,
                              weightdecay=0.01)
    trainer.trainUntilConvergence(maxEpochs=25)
    result = percentError(trainer.testOnClassData(dataset=test_ds),
                          test_ds['class'])
#     result = validate(trainer, train_ds, 5, 10)
    print 'Simple network - Percent Error', result
    return result
Author: jac241, Project: Machine-Learning-Neural-Network, Lines of code: 17, Source: nnexperiment.py


Example 13: __init__

  def __init__(self, hidden_neuron_num=1, hidden_type='sigmoid'):
    self.hidden_neuron_num = hidden_neuron_num
    self.hidden_type = hidden_type

    self.net = FeedForwardNetwork()
    self.samples = SupervisedDataSet(784, 784)

    self.vectorizer = ImageVectorizer()

    self.add_layers()
    self.add_connections()
    self.sort()
Author: joshsilverman, Project: neuralnet, Lines of code: 12, Source: neural_net.py


Example 14: __init__

	def __init__(self, num_features, num_hidden_neurons):
		super(NNet,self).__init__(num_features)

		self.ds = SupervisedDataSet(num_features, 1)

		self.net = FeedForwardNetwork()
		self.net.addInputModule(LinearLayer(num_features, name='in'))
		self.net.addModule(LinearLayer(num_hidden_neurons, name='hidden'))
		self.net.addOutputModule(LinearLayer(1, name='out'))
		self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
		self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
		self.net.sortModules()
Author: rahul003, Project: rl_page_replacement, Lines of code: 12, Source: approximator.py


Example 15: train

    def train(self):

        n = FeedForwardNetwork()

        dataModel = SongFactory(self.major).getModels()

        ds = SupervisedDataSet(static.NUM_OF_INPUTS, 1)

        #adds samples from the data received from songfactory and the k
        for data in dataModel:
            for input, target in data.model:
                print input, target
                ds.addSample(input, target)


        #instantiate the network
        self.net = FeedForwardNetwork()
        bias = BiasUnit()
        self.net.addModule(bias)

        #create the layers of the network
        inLayer = LinearLayer(static.NUM_OF_INPUTS)
        outLayer = LinearLayer(1)
        hidden1 = SigmoidLayer(25)
        hidden2 = SigmoidLayer(5)

        #add the layers
        self.net.addInputModule(inLayer)
        self.net.addOutputModule(outLayer)
        self.net.addModule(hidden1)
        self.net.addModule(hidden2)

        #create the connection
        in_h1 = FullConnection(inLayer,hidden1)
        h1_h2 = FullConnection(hidden1, hidden2)
        h2_out = FullConnection(hidden2, outLayer)
        b_h1  = FullConnection(bias, hidden1)
        b_h2  = FullConnection(bias, hidden2)

        #add the connection
        self.net.addConnection(in_h1)
        self.net.addConnection(h1_h2)
        self.net.addConnection(h2_out)
        self.net.addConnection(b_h1)
        self.net.addConnection(b_h2)

        self.net.sortModules()

        #trainer to edit the network
        trainer = BackpropTrainer(self.net, ds, learningrate = 0.003)

        trainer.trainEpochs(25)
Author: davepagurek, Project: Chordi.co, Lines of code: 52, Source: learn.py


Example 16: narcolepsy

    def narcolepsy(self, naps, awakenings, obesity):
        parameters = [naps, awakenings, obesity]

        # Init network
        network = FeedForwardNetwork()
        # Init Layers
        inLayer = LinearLayer(3)
        outLayer = LinearLayer(1)
        # Init connection
        in_to_out = FullConnection(inLayer, outLayer)
        # Add modules
        network.addInputModule(inLayer)
        network.addOutputModule(outLayer)
        # Add connections
        network.addConnection(in_to_out)
        # Sort
        network.sortModules()
        # Set equal weights
        # TODO: Use learning to learn weights over time
        # in_to_out._setParameters([.1,.1,.1])
        probability = network.activate(parameters)[0]

        return probability
Author: JCDJulian, Project: forty-winks, Lines of code: 23, Source: neural_network.py


Example 17: insomnia

    def insomnia(self, falling_asleep, awakenings, cant_fall_back, low_sleep_hours):
        parameters = [falling_asleep, awakenings, cant_fall_back, low_sleep_hours]

        # Init network
        network = FeedForwardNetwork()
        # Init Layers
        inLayer = LinearLayer(4)
        outLayer = LinearLayer(1)
        # Init connection
        in_to_out = FullConnection(inLayer, outLayer)
        # Add modules
        network.addInputModule(inLayer)
        network.addOutputModule(outLayer)
        # Add connections
        network.addConnection(in_to_out)
        # Sort
        network.sortModules()
        # Set equal weights
        # TODO: Use learning to learn weights over time
        # in_to_out._setParameters([.1,.1,.1,.1])
        probability = network.activate(parameters)[0]

        return probability
Author: JCDJulian, Project: forty-winks, Lines of code: 23, Source: neural_network.py


Example 18: __init__

	def __init__(self,layer_type):
		self.inputLayer = LinearLayer(2)
		self.hiddenLayer = layer_type(10)
		self.outputLayer = layer_type(2)

		self.net = FeedForwardNetwork()
		self.net.addInputModule(self.inputLayer)
		self.net.addModule(self.hiddenLayer)
		self.net.addOutputModule(self.outputLayer)

		self.inputToHidden = FullConnection(self.inputLayer,self.hiddenLayer)
		self.hiddenToOutput = FullConnection(self.hiddenLayer,self.outputLayer)

		self.net.addConnection(self.inputToHidden)
		self.net.addConnection(self.hiddenToOutput)

		self.net.sortModules()
Author: makslevental, Project: school_work, Lines of code: 17, Source: ann.py


Example 19: construct

    def construct(self, sensor_states, behaviors):

        input_len = len(sensor_states[0])
        state_len = input_len + len(self._behavior_to_list(''))

        # Initialize the network
        self.net = FeedForwardNetwork()

        input_layer = SigmoidLayer(state_len)
        hidden_layer = SigmoidLayer( int(state_len * 1.5) )
        output_layer = SigmoidLayer(input_len)

        input_to_hidden = FullConnection(input_layer, hidden_layer)
        hidden_to_output = FullConnection(hidden_layer, output_layer)

        self.net.addInputModule( input_layer )
        self.net.addModule( hidden_layer )
        self.net.addOutputModule( output_layer )

        self.net.addConnection(input_to_hidden)
        self.net.addConnection(hidden_to_output)

        self.net.sortModules()

        # Build the data set
        ds = SupervisedDataSet(state_len, input_len)

        previous_state = sensor_states[0]
        for i in range(1, len(sensor_states) - 1):
            behavior = behaviors[i-1]
            current_state = sensor_states[i]

            a = tuple( previous_state + self._behavior_to_list(behavior) )
            b = tuple( current_state )

            ds.addSample(a, b)

        # Train the network
        trainer = BackpropTrainer(self.net, ds, learningrate=0.3)
        for i in range(1000):
            t1 = datetime.datetime.now()
            err = trainer.train()
            t2 = datetime.datetime.now()
            print '%d: %f (%s)' % (i, err, t2 - t1)
            if self.training_callback is not None:
                self.training_callback(i, self)
Author: ungeroed, Project: Exploratory-robot-learning, Lines of code: 46, Source: graph.py


Example 20: fit

    def fit(self, X, y):
        self.n = FeedForwardNetwork()

        self.n.addInputModule(SigmoidLayer(self.inp_neu, name='in'))
        self.n.addModule(SigmoidLayer(self.hid_neu, name='hidden'))
        self.n.addOutputModule(LinearLayer(self.out_neu, name='out'))
        self.n.addConnection(FullConnection(self.n['in'], self.n['hidden'], name='c1'))
        self.n.addConnection(FullConnection(self.n['hidden'], self.n['out'], name='c2'))

        self.n.sortModules() #initialisation

        self.tstdata, trndata = self.data(X,y).splitWithProportion(self.split_prop)

        trainer = BackpropTrainer(self.n, trndata, learningrate=self.learn_rate, momentum=self.nomentum, weightdecay=self.weight_dec)
        trainer.trainUntilConvergence(verbose=True, maxEpochs=self.epochs)

        return self
Author: pcolo, Project: regret, Lines of code: 17, Source: regret_algo2.py



Note: The pybrain.structure.FeedForwardNetwork class examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub/MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code belongs to the original authors, and distribution and use should follow the corresponding project's license. Do not reproduce without permission.

