Python feedforward.FeedForwardNetwork class code examples


This article collects typical usage examples of the pybrain.structure.networks.feedforward.FeedForwardNetwork class in Python. If you are wondering what the FeedForwardNetwork class is for, how to use it, or what working examples look like, the selected code examples below should help.



The following presents 20 code examples of the FeedForwardNetwork class, sorted by popularity by default.
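
Before diving into the examples, here is a minimal sketch of the construction pattern that recurs in most of them: instantiate a FeedForwardNetwork, register input, hidden, and output modules, wire them with FullConnection objects, and call sortModules() to finalize the topology before activating the net. The layer sizes and the input vector are illustrative only and are not taken from any particular example below.

from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules import LinearLayer, SigmoidLayer
from pybrain.structure.connections import FullConnection

net = FeedForwardNetwork()
in_layer = LinearLayer(2)       # input layer
hidden_layer = SigmoidLayer(3)  # hidden layer
out_layer = LinearLayer(1)      # output layer

# register the modules with the network
net.addInputModule(in_layer)
net.addModule(hidden_layer)
net.addOutputModule(out_layer)

# fully connect input -> hidden -> output
net.addConnection(FullConnection(in_layer, hidden_layer))
net.addConnection(FullConnection(hidden_layer, out_layer))

net.sortModules()               # finalize structure; required before use
print(net.activate((1, 0)))     # forward pass on one input vector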

Example 1: __init__

    def __init__(self, predefined = None, **kwargs):
        """ For the current implementation, the sequence length
        needs to be fixed, and given at construction time. """
        if predefined is not None:
            self.predefined = predefined
        else:
            self.predefined = {}
        FeedForwardNetwork.__init__(self, **kwargs)
        assert self.seqlen is not None

        # the input is a 1D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')

        # the output is also a 1D-mesh
        outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')

        # the hidden layers are placed in a 2 x seqlen mesh
        hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
                                                    (2, self.seqlen), 'hidden')

        # add the modules
        for c in inmesh:
            self.addInputModule(c)
        for c in outmesh:
            self.addOutputModule(c)
        for c in hiddenmesh:
            self.addModule(c)

        # set the connection weights to be shared
        inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
        outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
        forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
        if self.symmetric:
            backwardconn = forwardconn
            inconnb = inconnf
            outconnb = outconnf
        else:
            backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
            inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
            outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')

        # build the connections
        for i in range(self.seqlen):
            # input to hidden
            self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
            self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
            # hidden to output
            self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
            self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
            if i > 0:
                # forward in time
                self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
            if i < self.seqlen - 1:
                # backward in time
                self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))

        self.sortModules()
Developer: Angeliqe, Project: pybrain, Lines of code: 59, Source: bidirectional.py


Example 2: createNet

def createNet():
    net = FeedForwardNetwork()
    modules = add_modules(net)
    add_connections(net, modules)
    # finish up
    net.sortModules()
    gradientCheck(net)
    return net
Developer: lbvienna, Project: compare_documents, Lines of code: 8, Source: neuralNet.py


Example 3: buildSlicedNetwork

def buildSlicedNetwork():
    """ build a network with shared connections. Two hiddne modules are symetrically linked, but to a different 
    input neuron than the output neuron. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name = 'a')
    b = LinearLayer(2, name = 'b')
    N.addInputModule(a)
    N.addOutputModule(b)
    
    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
Developer: HKou, Project: pybrain, Lines of code: 13, Source: test_sliced_connections.py


Example 4: __init__

 def __init__(self, boardSize, convSize, numFeatureMaps, **args):
     inputdim = 2
     FeedForwardNetwork.__init__(self, **args)
     inlayer = LinearLayer(inputdim*boardSize*boardSize, name = 'in')
     self.addInputModule(inlayer)
     
     # we need some treatment of the border too - thus we pad the direct board input.
     x = convSize/2
     insize = boardSize+2*x
     if convSize % 2 == 0: 
         insize -= 1            
     paddedlayer = LinearLayer(inputdim*insize*insize, name = 'pad')
     self.addModule(paddedlayer)
     
     # we connect a bias to the padded-parts (with shared but trainable weights).
     bias = BiasUnit()
     self.addModule(bias)
     biasConn = MotherConnection(inputdim)
     
     paddable = []
     if convSize % 2 == 0: 
         xs = range(x)+range(insize-x+1, insize)
     else:
         xs = range(x)+range(insize-x, insize)
     paddable.extend(crossproduct([range(insize), xs]))
     paddable.extend(crossproduct([xs, range(x, boardSize+x)]))
     
     for (i, j) in paddable:
         self.addConnection(SharedFullConnection(biasConn, bias, paddedlayer, 
                                                 outSliceFrom = (i*insize+j)*inputdim, 
                                                 outSliceTo = (i*insize+j+1)*inputdim))
             
     for i in range(boardSize):
         inmod = ModuleSlice(inlayer, outSliceFrom = i*boardSize*inputdim, 
                             outSliceTo = (i+1)*boardSize*inputdim)
         outmod = ModuleSlice(paddedlayer, inSliceFrom = ((i+x)*insize+x)*inputdim, 
                              inSliceTo = ((i+x)*insize+x+boardSize)*inputdim)
         self.addConnection(IdentityConnection(inmod, outmod))
         
     self._buildStructure(inputdim, insize, paddedlayer, convSize, numFeatureMaps)
     self.sortModules()
                     
Developer: ZachPhillipsGary, Project: CS200-NLP-ANNsProject, Lines of code: 41, Source: convboard.py


Example 5: training

    def training(self,d):
        """
        Builds a network ,trains and returns it
        """

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(4) # 4 input neurons
        hiddenLayer = SigmoidLayer(3) # 3 neurons on the hidden layer with sigmoid activation
        outLayer = LinearLayer(2) # 2 neurons on the output layer


        "add layers to NN"
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        "create connections"
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        "add connections"
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        "some unknown but necessary function :)"
        self.net.sortModules()

        print self.net

        "generate big sized training set"
        trainingSet = SupervisedDataSet(4,2)

        trainArr = self.generate_training_set()
        for ri in range(2000):
            input = ((trainArr[0][ri][0],trainArr[0][ri][1],trainArr[0][ri][2],trainArr[0][ri][3]))
            target = ((trainArr[1][ri][0],trainArr[1][ri][1]))
            trainingSet.addSample(input, target)

        "create backpropogation trainer"
        t = BackpropTrainer(self.net,d,learningrate=0.00001, momentum=0.99)
        while True:
            globErr = t.train()
            print "global error:", globErr
            if globErr < 0.0001:
                break

        return self.net
Developer: MFarida, Project: NEUCOGAR, Lines of code: 48, Source: Main.py


Example 6: __init__

    def __init__(self, x_dim, y_dim, hidden_size, s_id):
        self.serialize_id = s_id
        self.net = FeedForwardNetwork()

        in_layer = LinearLayer(x_dim)
        hidden_layer = SigmoidLayer(hidden_size)
        out_layer = LinearLayer(y_dim)
        self.net.addInputModule(in_layer)
        self.net.addModule(hidden_layer)
        self.net.addOutputModule(out_layer)

        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, out_layer)
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        self.net.sortModules()
Developer: erdincay, Project: ScoreGrass, Lines of code: 17, Source: PyBrainANNs.py


Example 7: _generate_pybrain_network

 def _generate_pybrain_network(self):
     # make network
     self._pybrain_network = FeedForwardNetwork()
     # make layers
     self._in_layer = LinearLayer(self.n_input_neurons, name='in')
     self._hidden_layer = SigmoidLayer(self.n_hidden_neurons, name='hidden')
     self._out_layer = LinearLayer(self.n_output_neurons, name='out')
     self._bias_neuron = BiasUnit(name='bias')
     # make connections between layers
     self._in_hidden_connection = FullConnection(self._in_layer, self._hidden_layer)
     self._hidden_out_connection = FullConnection(self._hidden_layer, self._out_layer)
     self._bias_hidden_connection = FullConnection(self._bias_neuron, self._hidden_layer)
     self._bias_out_connection = FullConnection(self._bias_neuron, self._out_layer)
     # add modules to network
     self._pybrain_network.addInputModule(self._in_layer)
     self._pybrain_network.addModule(self._hidden_layer)
     self._pybrain_network.addOutputModule(self._out_layer)
     self._pybrain_network.addModule(self._bias_neuron)
     # add connections to network
     for c in (self._in_hidden_connection, self._hidden_out_connection, self._bias_hidden_connection, self._bias_out_connection):
         self._pybrain_network.addConnection(c)
     # initialize network with added modules/connections
     self._pybrain_network.sortModules()
Developer: LocusCoeruleus, Project: netwhisperer, Lines of code: 23, Source: network.py


Example 8: _buildNetwork

def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'

    Afterwards, the last layer of one tuple is connected to the first layer of
    the following tuple by a FullConnection.

    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection.

    Example:

        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options['bias'] if 'bias' in options else False

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(layerParts.next())
    firstLayer = firstPart.next()
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    net.addOutputModule(layer)
    net.sortModules()
    return net
Developer: Boblogic07, Project: pybrain, Lines of code: 52, Source: shortcuts.py


Example 9: __init__

 def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
     FeedForwardNetwork.__init__(self, **args)
     inlayer = LinearLayer(inputdim * insize * insize)
     self.addInputModule(inlayer)
     self._buildStructure(inputdim, insize, inlayer, convSize, numFeatureMaps)
     self.sortModules()
Developer: Angeliqe, Project: pybrain, Lines of code: 6, Source: convolutional.py


Example 10: __init__

    def __init__(self, states, verbose=False, max_epochs=None):
        '''Create a NeuralNetwork instance.

        `states` is a tuple of tuples of ints, representing the discovered subnetworks'
        entrez ids.
        '''
        self.verbose         = verbose
        self.max_epochs      = max_epochs
        self.num_features    = sum(map(lambda tup: len(tup), states))
        self.states          = states

        n = FeedForwardNetwork()
        n.addOutputModule(TanhLayer(1, name='out'))
        n.addModule(BiasUnit(name='bias out'))
        n.addConnection(FullConnection(n['bias out'], n['out']))

        for i, state in enumerate(states):
            dim = len(state)
            n.addInputModule(TanhLayer(dim, name='input %s' % i))
            n.addModule(BiasUnit(name='bias input %s' % i))
            n.addConnection(FullConnection(n['bias input %s' % i], n['input %s' % i]))
            n.addConnection(FullConnection(n['input %s' % i], n['out']))

        n.sortModules()
        self.n = n
Developer: mrorii, Project: crane, Lines of code: 25, Source: neural_network.py


Example 11: generate_training_set

class MLP:

    data = SupervisedDataSet
    net = FeedForwardNetwork

    def generate_training_set(self):
        random.seed()
        ind = floor(empty((2000,4)))
        outd = floor(empty((2000, 2)))

        res = array((ind,outd))

        print ind
        print
        print outd
        print
        print res

        for i in range(2000):
            n = random.getrandbits(1)
            if n == 0:
                a = random.randint(0,100)
                b = random.randint(0,100)
                c = random.randint(100,5000)
                d = random.randint(100,5000)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d

                res[1][i][0] = 0
                res[1][i][1] = 1

            else:
                a = random.randint(100,5000)
                b = random.randint(100,5000)
                c = random.randint(0,100)
                d = random.randint(0,100)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d

                res[1][i][0] = 1
                res[1][i][1] = 0

        for i in range(2000):
            print res[0][i][0],res[0][i][1],res[0][i][2],res[0][i][3], " out", res[1][i][0],res[1][i][1]
        return res

    def getFullDataSet(self):
        res = zeros((50**4, 4))
        a = 0
        b = 0
        c = 0
        d = 0
        for i in range(len(res)):
            if (a % 50 == 0):
                a = 0
            a = a + 1
            if (i % 2 == 0):
                if (b % 50 == 0):
                    b = 0
                b = b + 1

            if (i % 4 == 0):
                if (c % 50 == 0):
                    c = 0
                c = c + 1
            if (i % 8 ==0):
                if (d % 50 == 0):
                    d = 0
                d = d + 1
            res[i][0] = a
            res[i][1] = b
            res[i][2] = c
            res[i][3] = d

        res += 75

        return res

    def make_dataset(self):
        """
        Creates a small set of training data with 4-dimensional input and 2-dimensional output,
        illustrating what the dataset should look like.
        """
        self.data = SupervisedDataSet(4,2)

        self.data.addSample((1,1,150,150),(0,1))
        self.data.addSample((1,1,199,142),(0,1))
        self.data.addSample((150,120,43,12),(1,0))
        self.data.addSample((198,123,54,65),(1,0))

        return self.data


    def training(self,d):
        """
        Builds a network ,trains and returns it
#......... some of the code is omitted here .........
Developer: MFarida, Project: NEUCOGAR, Lines of code: 101, Source: Main.py


Example 12: _build_network

def _build_network():
    logger.info("Building network...")

    net = FeedForwardNetwork()
    inp = LinearLayer(IMG_WIDTH * IMG_HEIGHT * 2)
    h1_image_width = IMG_WIDTH - FIRST_CONVOLUTION_FILTER + 1
    h1_image_height = IMG_HEIGHT - FIRST_CONVOLUTION_FILTER + 1
    h1_full_width = h1_image_width * CONVOLUTION_MULTIPLIER * NUMBER_OF_IMAGES
    h1_full_height = h1_image_height * CONVOLUTION_MULTIPLIER
    h1 = SigmoidLayer(h1_full_width * h1_full_height)

    h2_width = h1_full_width / 2
    h2_height = h1_full_height / 2
    h2 = LinearLayer(h2_width * h2_height)

    h3_image_width = h2_width / CONVOLUTION_MULTIPLIER / NUMBER_OF_IMAGES - SECOND_CONVOLUTION_FILTER + 1
    h3_image_height = h2_height / CONVOLUTION_MULTIPLIER - SECOND_CONVOLUTION_FILTER + 1
    h3_full_width = h3_image_width * (CONVOLUTION_MULTIPLIER * 2) * NUMBER_OF_IMAGES
    h3_full_height = h3_image_height * (CONVOLUTION_MULTIPLIER * 2)
    h3 = SigmoidLayer(h3_full_width * h3_full_height)

    h4_full_width = h3_image_width - MERGE_FILTER
    h4_full_height = h3_image_height - MERGE_FILTER
    h4 = SigmoidLayer(h4_full_width * h4_full_height)

    logger.info("BASE IMG: %d x %d" % (IMG_WIDTH, IMG_HEIGHT))
    logger.info("First layer IMG: %d x %d" % (h1_image_width, h1_image_height))
    logger.info("First layer FULL: %d x %d" % (h1_full_width, h1_full_height))
    logger.info("Second layer FULL: %d x %d" % (h2_width, h2_height))
    logger.info("Third layer IMG: %d x %d" % (h3_image_width, h3_image_height))
    logger.info("Third layer FULL: %d x %d" % (h3_full_width, h3_full_height))
    logger.info("Forth layer FULL: %d x %d" % (h3_image_width, h3_image_height))
    outp = SoftmaxLayer(2)

    h5 = SigmoidLayer(h4_full_width * h4_full_height)

    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addModule(h3)
    net.addModule(h4)
    net.addModule(h5)

    # create connections

    for i in range(NUMBER_OF_IMAGES):
        _add_convolutional_connection(
            net=net,
            h1=inp,
            h2=h1,
            filter_size=FIRST_CONVOLUTION_FILTER,
            multiplier=CONVOLUTION_MULTIPLIER,
            input_width=IMG_WIDTH * 2,
            input_height=IMG_HEIGHT,
            output_width=h1_full_width,
            output_height=h1_full_height,
            offset_x=h1_image_width * i,
            offset_y=0,
            size_x=h1_image_width,
            size_y=h1_image_height
        )

    _add_pool_connection(
        net=net,
        h1=h1,
        h2=h2,
        input_width=h1_full_width,
        input_height=h1_full_height
    )

    for i in range(NUMBER_OF_IMAGES * CONVOLUTION_MULTIPLIER):
        for j in range(CONVOLUTION_MULTIPLIER):
            _add_convolutional_connection(
                net=net,
                h1=h2,
                h2=h3,
                filter_size=SECOND_CONVOLUTION_FILTER,
                multiplier=CONVOLUTION_MULTIPLIER,
                input_width=h2_width,
                input_height=h2_height,
                output_width=h3_full_width,
                output_height=h3_full_height,
                offset_x=h3_image_width * i,
                offset_y=h3_image_height * j,
                size_x=h3_image_width,
                size_y=h3_image_height
            )

    _merge_connection(
        net=net,
        h1=h3,
        h2=h4,
        filter_size=MERGE_FILTER,
        input_width=h3_full_width,
        input_height=h3_full_height,
        output_width=h4_full_width,
        output_height=h4_full_height
    )
#......... some of the code is omitted here .........
Developer: ShadowswordPL, Project: PowerRecruiter, Lines of code: 101, Source: neural_network.py


Example 13: __init__

 def __init__(self, **args):
     FeedForwardNetwork.__init__(self, **args)
Developer: hherman1, Project: ConvolutionalNeuralNetwork, Lines of code: 2, Source: CustomConv.py


Example 14: buildSubsamplingNetwork

def buildSubsamplingNetwork():
    """ Builds a network with subsampling connections. """
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(6, 'in'))
    n.addOutputModule(LinearLayer(1, 'out'))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceTo=4))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceFrom=4))
    n.sortModules()
    return n
Developer: davidmiller, Project: pybrain, Lines of code: 9, Source: test_subsampling_connection.py


Example 15: buildnet

def buildnet(modules):
    net = FeedForwardNetwork(name='mynet');
    net.addInputModule(modules['in'])
    net.addModule(modules['hidden'])
    net.addOutputModule(modules['out'])
    net.addModule(modules['bias'])
    net.addConnection(modules['in_to_hidden'])
    net.addConnection(modules['bias_to_hidden'])
    net.addConnection(modules['bias_to_out'])
    if ('hidden2' in modules):
        net.addModule(modules['hidden2'])
        net.addConnection(modules['hidden_to_hidden2'])
        net.addConnection(modules['bias_to_hidden2'])
        net.addConnection(modules['hidden2_to_out'])
    else:
        net.addConnection(modules['hidden_to_out'])
    net.sortModules()
    return net
Developer: gnrhxni, Project: CS542, Lines of code: 18, Source: nettalk_modules.py


Example 16: buildParity

 def buildParity(self):
     self.params['dataset'] = 'parity'
     self.trn_data = ParityDataSet(nsamples=75)
     self.trn_data.setField('class', self.trn_data['target'])
     self.tst_data = ParityDataSet(nsamples=75)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Developer: mfbx9da4, Project: neuron-astrocyte-networks, Lines of code: 22, Source: pybrain_ga.py


Example 17: print

ds.addSample((1, 1), (0,))

for input, target in ds:
    print(input, target)
    
#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()

print(net)

trainer = BackpropTrainer(net, ds)

for i in range(20):
    for j in range(1000):               
Developer: martinfesser, Project: PythonAndAi, Lines of code: 31, Source: feedforwardTutorial.py


Example 18: FullConnection

    bias_to_out = FullConnection(biasUnit, outLayer)

    tosave = [ inLayer, hiddenLayer, outLayer, biasUnit, in_to_hidden, hidden_to_out, bias_to_hidden, bias_to_out ];

    return tosave


if (len(sys.argv) <= 3):
    saved = buildNet()
else:
    saved = pickle.load(open(sys.argv[3], "rb"));

pickle.dump( saved, open( "pablosemptynet.p", "wb" ) )


net = FeedForwardNetwork(name='mynet');

net.addInputModule(saved[0])
net.addModule(saved[1])
net.addOutputModule(saved[2])
net.addModule(saved[3])
net.addConnection(saved[4])
net.addConnection(saved[5])
net.addConnection(saved[6])
net.addConnection(saved[7])

net.sortModules()

trainer = BackpropTrainer(net, None, learningrate=lrate, verbose=False, batchlearning=True, weightdecay=wdecay)                                        
stressErrors=list();
phonemeErrors=list();
Developer: gnrhxni, Project: CS542, Lines of code: 31, Source: pablo_network.py


Example 19: phoneme_to_layer

class Network:
    "NETwhisperer neural network"
        
    def phoneme_to_layer(self, phoneme):
        return self.phonemes_to_layers[phoneme]

    def layer_to_phoneme(self, layer):
        def cos_to_input(item):
            phoneme, phoneme_layer = item
            return _cos(layer,phoneme_layer)
        # minimum angle should be maximum cos    
        return max(self.phonemes_to_layers.iteritems(), key=cos_to_input)[0]    

    def __init__(self, window_size, window_middle, n_hidden_neurons):
        self.window_size = window_size
        self.window_middle = window_middle
        self.n_hidden_neurons = n_hidden_neurons
        self.n_trainings = 0
        self.training_errors = []
        self._init_layers()
        self._generate_pybrain_network()
        
    def _init_layers(self):
        # one neuron for each window/letter combination
        self.letter_neuron_names = list(product(range(self.window_size), corpus.all_letters))
        # one neuron for each phoneme trait
        self.phoneme_trait_neuron_names = list(corpus.all_phoneme_traits)
        # neuron counts
        self.n_input_neurons = len(self.letter_neuron_names)
        self.n_output_neurons = len(self.phoneme_trait_neuron_names)        
        # mapping from (pos, letter) to input neuron index
        self.letters_to_neurons = dict({(pos_and_letter, index) for index, pos_and_letter in enumerate(self.letter_neuron_names)})
        # mapping from trait to neuron
        self.traits_to_neurons = dict({(trait, index) for index, trait in enumerate(self.phoneme_trait_neuron_names)})
        # mapping from phoneme to layer
        self.phonemes_to_layers = {}
        for (phoneme, traits) in corpus.phoneme_traits.iteritems():
            layer = zeros(self.n_output_neurons)
            for trait in traits:
                index = self.traits_to_neurons[trait]
                layer[index] = 1
            self.phonemes_to_layers[phoneme] = layer
            
    def _generate_pybrain_network(self):
        # make network
        self._pybrain_network = FeedForwardNetwork()
        # make layers
        self._in_layer = LinearLayer(self.n_input_neurons, name='in')
        self._hidden_layer = SigmoidLayer(self.n_hidden_neurons, name='hidden')
        self._out_layer = LinearLayer(self.n_output_neurons, name='out')
        self._bias_neuron = BiasUnit(name='bias')
        # make connections between layers
        self._in_hidden_connection = FullConnection(self._in_layer, self._hidden_layer)
        self._hidden_out_connection = FullConnection(self._hidden_layer, self._out_layer)
        self._bias_hidden_connection = FullConnection(self._bias_neuron, self._hidden_layer)
        self._bias_out_connection = FullConnection(self._bias_neuron, self._out_layer)
        # add modules to network
        self._pybrain_network.addInputModule(self._in_layer)
        self._pybrain_network.addModule(self._hidden_layer)
        self._pybrain_network.addOutputModule(self._out_layer)
        self._pybrain_network.addModule(self._bias_neuron)
        # add connections to network
        for c in (self._in_hidden_connection, self._hidden_out_connection, self._bias_hidden_connection, self._bias_out_connection):
            self._pybrain_network.addConnection(c)
        # initialize network with added modules/connections
        self._pybrain_network.sortModules()

    def windowIter(self, letters):
        assert type(letters) == str
        padding_before = ' ' * self.window_middle
        padding_after = ' ' * (self.window_size - self.window_middle - 1)
        padded_letters = padding_before + letters + padding_after
        # for each letter in the sample
        for l_num in range(len(letters)):
            letters_window = padded_letters[l_num:l_num+self.window_size]
            yield letters_window    

    def generateSamples(self, letters, phonemes):
        assert len(letters) == len(phonemes)
        for (letters_window, current_phoneme) in izip(self.windowIter(letters), phonemes):
            yield self.letters_to_layer(letters_window), self.phoneme_to_layer(current_phoneme)

    def letters_to_layer(self, letters):
        assert len(letters) == self.window_size
        # start with empty layer
        layer = zeros(self.n_input_neurons)
        # loop through letters and activate each neuron
        for (pos, letter) in enumerate(letters):
            index = self.letters_to_neurons[(pos, letter)]
            layer[index] = 1
        return layer
        
    def train(self, training_set, n_epochs=1, callback=None):
        # build dataset
        dataset = DataSet(self.n_input_neurons, self.n_output_neurons)
        for (ltr,ph) in training_set:
            for sample in self.generateSamples(ltr,ph):
                dataset.addSample(*sample)
        # build trainer
        trainer = Trainer(self._pybrain_network, dataset, 0.01, 1.0, 0.9)
#......... some of the code is omitted here .........
Developer: LocusCoeruleus, Project: netwhisperer, Lines of code: 101, Source: network.py


Example 20: main

def main():
    a = 0
    for i in range(0,100):
        inLayer = SigmoidLayer(2)
        hiddenLayer = SigmoidLayer(3)
        outLayer = SigmoidLayer(1)
        
        net = FeedForwardNetwork()
        net.addInputModule(inLayer)
        net.addModule(hiddenLayer)
        net.addOutputModule(outLayer)
        
        in_to_hidden = FullConnection(inLayer,hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer,outLayer)
        
        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)
        
        net.sortModules()
        
        ds = SupervisedDataSet(2,1)
        ds.addSample((1,1), (0))
        ds.addSample((1,0), (1))
        ds.addSample((0,1), (1))
        ds.addSample((0,0), (0))
        
        trainer = BackpropTrainer(net,ds)
        trainer.trainUntilConvergence()
        
        out = net.activate((1,1))
        if (out < 0.5):
            a = a + 1
    print(str(a) + "/100")
Developer: Kerzak1408, Project: HearthstoneAI, Lines of code: 33, Source: neural_network.py



Note: The pybrain.structure.networks.feedforward.FeedForwardNetwork class examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. For redistribution and use, please refer to the license of the corresponding project; do not reproduce without permission.

