This article collects typical usage examples of the Golang type github.com/unixpickle/weakai/neuralnet.Network. If you are wondering what the Network type is for, how to use it, or what it looks like in real code, the hand-picked examples below should help.
The sections below present 15 code examples of the Network type, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Golang code examples.
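Before the individual examples, here is a minimal, self-contained sketch of the basic pattern most of them share: build a neuralnet.Network from layers, call Randomize to initialize the weights, then Apply it to an autofunc.Variable and read the Output. The layer sizes and input values are invented for illustration and are not taken from any example below.

package main

import (
	"fmt"

	"github.com/unixpickle/autofunc"
	"github.com/unixpickle/weakai/neuralnet"
)

func main() {
	// A small feed-forward network: 3 inputs -> 5 hidden units -> 2 outputs.
	net := neuralnet.Network{
		&neuralnet.DenseLayer{InputCount: 3, OutputCount: 5},
		&neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{InputCount: 5, OutputCount: 2},
		&neuralnet.Sigmoid{},
	}
	// Randomize the weights before training or evaluation.
	net.Randomize()

	// Run the network on a single input vector.
	in := &autofunc.Variable{Vector: []float64{0.1, 0.5, -0.2}}
	fmt.Println(net.Apply(in).Output())
}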
Example 1: main
func main() {
rand.Seed(time.Now().UnixNano())
outNet := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: StateSize * 2,
OutputCount: 10,
},
&neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: 10,
OutputCount: 2,
},
&neuralnet.LogSoftmaxLayer{},
}
outNet.Randomize()
bd := &rnn.Bidirectional{
Forward: &rnn.BlockSeqFunc{B: rnn.NewGRU(2, StateSize)},
Backward: &rnn.BlockSeqFunc{B: rnn.NewGRU(2, StateSize)},
Output: &rnn.NetworkSeqFunc{Network: outNet},
}
var sampleSet sgd.SliceSampleSet
for i := 0; i < TrainingSize; i++ {
sampleSet = append(sampleSet, generateSequence())
}
g := &sgd.RMSProp{
Gradienter: &seqtoseq.Gradienter{
SeqFunc: bd,
Learner: bd,
CostFunc: neuralnet.DotCost{},
},
}
var i int
sgd.SGDInteractive(g, sampleSet, StepSize, BatchSize, func() bool {
fmt.Printf("%d epochs: cost=%f\n", i, totalCost(bd, sampleSet))
i++
return true
})
var testingCorrect, testingTotal int
for j := 0; j < TestingSize; j++ {
sample := generateSequence()
inRes := seqfunc.ConstResult([][]linalg.Vector{sample.Inputs})
output := bd.ApplySeqs(inRes).OutputSeqs()[0]
for i, expected := range sample.Outputs {
actual := output[i]
if math.Abs(expected[0]-math.Exp(actual[0])) < 0.1 {
testingCorrect++
}
testingTotal++
}
}
fmt.Printf("Got %d/%d (%.2f%%)\n", testingCorrect, testingTotal,
100*float64(testingCorrect)/float64(testingTotal))
}
Developer: unixpickle, Project: weakai, Lines: 60, Source: main.go
Example 2: printScore
func printScore(prefix string, n neuralnet.Network, d mnist.DataSet) {
classifier := func(v []float64) int {
r := n.Apply(&autofunc.Variable{v})
return networkOutput(r)
}
correctCount := d.NumCorrect(classifier)
histogram := d.CorrectnessHistogram(classifier)
log.Printf("%s: %d/%d - %s", prefix, correctCount, len(d.Samples), histogram)
}
Developer: unixpickle, Project: weakai, Lines: 9, Source: main.go
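The networkOutput helper called above is not shown on this page. A plausible implementation, mirroring the arg-max logic of the countCorrect example further down, is sketched here; the function body is an assumption reconstructed from context, not code from the repository, and it assumes the same github.com/unixpickle/autofunc import as the example above.

// networkOutput returns the index of the largest output component,
// i.e. the predicted class label. (Hypothetical helper, not from the repo.)
func networkOutput(r autofunc.Result) int {
	out := r.Output()
	maxIdx, maxVal := 0, out[0]
	for i, x := range out {
		if x > maxVal {
			maxIdx, maxVal = i, x
		}
	}
	return maxIdx
}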
Example 3: NewNetworkBlock
// NewNetworkBlock creates a NetworkBlock.
func NewNetworkBlock(n neuralnet.Network, stateSize int) *NetworkBlock {
return &NetworkBlock{
batcherBlock: &BatcherBlock{
B: n.BatchLearner(),
StateSize: stateSize,
Start: &autofunc.Variable{Vector: make(linalg.Vector, stateSize)},
},
network: n,
}
}
Developer: unixpickle, Project: weakai, Lines: 11, Source: network_block.go
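Example 13 further down shows this constructor in practice: a stateless preprocessing network is wrapped with rnn.NewNetworkBlock(inputNet, 0) and stacked in front of a GRU. A condensed sketch of that pattern follows; the function name, noise level, and sizes are hypothetical and chosen only for illustration.

// newPreprocessedGRU wraps a stateless input-transforming network (state size 0)
// as a block and stacks a GRU on top of it. Sizes are illustrative; see
// Example 13 for the real configuration.
func newPreprocessedGRU() rnn.StackedBlock {
	inputNet := neuralnet.Network{
		&neuralnet.GaussNoiseLayer{Stddev: 0.1, Training: false},
	}
	return rnn.StackedBlock{
		rnn.NewNetworkBlock(inputNet, 0),
		rnn.NewGRU(26, 128), // hypothetical feature count and hidden size
	}
}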
Example 4: TestStackedBlock
func TestStackedBlock(t *testing.T) {
testVars := []*autofunc.Variable{
{Vector: []float64{0.098591, -0.595453, -0.751214, 0.266051}},
{Vector: []float64{0.988517, 0.107284, -0.331529, 0.028565}},
{Vector: []float64{-0.150604, 0.889039, 0.120916, 0.240999}},
{Vector: []float64{0.961058, 0.878608, 0.052284, -0.635746}},
{Vector: []float64{0.31415, -0.2718}},
{Vector: []float64{-0.6}},
}
testSeqs := [][]*autofunc.Variable{
{testVars[0], testVars[2]},
{testVars[1]},
{testVars[2], testVars[1], testVars[3]},
}
testRV := autofunc.RVector{
testVars[0]: []float64{0.62524, 0.52979, 0.33020, 0.54462},
testVars[1]: []float64{0.13498, 0.12607, 0.35989, 0.23255},
testVars[2]: []float64{0.85996, 0.68435, 0.68506, 0.96907},
testVars[3]: []float64{0.79095, 0.33867, 0.86759, 0.16159},
testVars[4]: []float64{-0.79095, 0.33867},
testVars[5]: []float64{0.33867},
}
net1 := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: 6,
OutputCount: 6,
},
&neuralnet.HyperbolicTangent{},
}
net1.Randomize()
net2 := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: 5,
OutputCount: 5,
},
&neuralnet.HyperbolicTangent{},
}
net2.Randomize()
block := &rnn.StackedBlock{
&rnn.BatcherBlock{B: net1.BatchLearner(), StateSize: 2, Start: testVars[4]},
&rnn.BatcherBlock{B: net2.BatchLearner(), StateSize: 1, Start: testVars[5]},
}
checker := &BlockChecker{
B: block,
Input: testSeqs,
Vars: testVars,
RV: testRV,
}
checker.FullCheck(t)
}
Developer: unixpickle, Project: weakai, Lines: 50, Source: stacked_block_test.go
Example 5: firstBitTest
// firstBitTest builds a neural network to:
// - output 0 for inputs starting with a 1
// - output 1 for inputs starting with a 0.
func firstBitTest() {
trainingSamples := make([]linalg.Vector, FirstBitTrainingSize)
trainingOutputs := make([]linalg.Vector, FirstBitTrainingSize)
for i := range trainingSamples {
trainingSamples[i] = make(linalg.Vector, FirstBitInputSize)
for j := range trainingSamples[i] {
trainingSamples[i][j] = float64(rand.Intn(2))
}
trainingOutputs[i] = []float64{1 - trainingSamples[i][0]}
}
samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs)
network := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: FirstBitInputSize,
OutputCount: FirstBitHiddenSize,
},
&neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: FirstBitHiddenSize,
OutputCount: 1,
},
&neuralnet.Sigmoid{},
}
network.Randomize()
batcher := &neuralnet.SingleRGradienter{
Learner: network,
CostFunc: neuralnet.MeanSquaredCost{},
}
sgd.SGD(batcher, samples, 0.2, 100000, 1)
var totalError float64
var maxPossibleError float64
for i := 0; i < 50; i++ {
sample := make([]float64, FirstBitInputSize)
for j := range sample {
sample[j] = float64(rand.Intn(2))
}
result := network.Apply(&autofunc.Variable{sample})
output := result.Output()[0]
amountError := math.Abs(output - (1 - sample[0]))
totalError += amountError
maxPossibleError += 1.0
}
fmt.Printf("firstBitTest() error rate: %f\n", totalError/maxPossibleError)
}
Developer: unixpickle, Project: weakai, Lines: 51, Source: main.go
Example 6: createNet
func createNet(d mnist.DataSet) neuralnet.Network {
convOutWidth := (d.Width-FilterSize)/FilterStride + 1
convOutHeight := (d.Height-FilterSize)/FilterStride + 1
poolOutWidth := convOutWidth / MaxPoolingSpan
if convOutWidth%MaxPoolingSpan != 0 {
poolOutWidth++
}
poolOutHeight := convOutHeight / MaxPoolingSpan
if convOutHeight%MaxPoolingSpan != 0 {
poolOutHeight++
}
net := neuralnet.Network{
&neuralnet.ConvLayer{
FilterCount: FilterCount,
FilterWidth: FilterSize,
FilterHeight: FilterSize,
Stride: FilterStride,
InputWidth: d.Width,
InputHeight: d.Height,
InputDepth: 1,
},
&neuralnet.Sigmoid{},
&neuralnet.MaxPoolingLayer{
XSpan: MaxPoolingSpan,
YSpan: MaxPoolingSpan,
InputWidth: convOutWidth,
InputHeight: convOutHeight,
InputDepth: FilterCount,
},
&neuralnet.DenseLayer{
InputCount: poolOutWidth * poolOutHeight * FilterCount,
OutputCount: HiddenSize,
},
&neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: HiddenSize,
OutputCount: LabelCount,
},
&neuralnet.SoftmaxLayer{},
}
net.Randomize()
return net
}
Developer: unixpickle, Project: weakai, Lines: 46, Source: main.go
Example 7: countCorrect
func countCorrect(n neuralnet.Network, s sgd.SampleSet) int {
var count int
for i := 0; i < s.Len(); i++ {
sample := s.GetSample(i).(neuralnet.VectorSample)
output := n.Apply(&autofunc.Variable{Vector: sample.Input}).Output()
var maxIdx int
var maxVal float64
for j, x := range output {
if x > maxVal || j == 0 {
maxIdx = j
maxVal = x
}
}
if sample.Output[maxIdx] == 1 {
count++
}
}
return count
}
Developer: unixpickle, Project: weakai, Lines: 19, Source: train.go
Example 8: TestBaselineChecks
func TestBaselineChecks(t *testing.T) {
network := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: 4,
OutputCount: 6,
},
neuralnet.HyperbolicTangent{},
}
network.Randomize()
for stateSize := 0; stateSize < 4; stateSize++ {
start := &autofunc.Variable{Vector: make(linalg.Vector, stateSize)}
for i := range start.Vector {
start.Vector[i] = rand.NormFloat64()
}
toTest := &rnn.BlockSeqFunc{
B: &rnn.BatcherBlock{
B: network.BatchLearner(),
StateSize: stateSize,
Start: start,
},
}
seqs, rv := randBaselineTestSeqs(network, 4-stateSize)
rv[start] = make(linalg.Vector, len(start.Vector))
for i := range rv[start] {
rv[start][i] = rand.NormFloat64()
}
vars := make([]*autofunc.Variable, 0, len(rv))
for v := range rv {
vars = append(vars, v)
}
checker := &functest.SeqRFuncChecker{
F: toTest,
Vars: vars,
Input: seqs,
RV: rv,
}
checker.FullCheck(t)
}
}
Developer: unixpickle, Project: weakai, Lines: 40, Source: baseline_test.go
Example 9: trainClassifier
func trainClassifier(n neuralnet.Network, d mnist.DataSet) {
log.Println("Training classifier (ctrl+C to finish)...")
killChan := make(chan struct{})
go func() {
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt)
<-c
signal.Stop(c)
fmt.Println("\nCaught interrupt. Ctrl+C again to terminate.")
close(killChan)
}()
inputs := make([]linalg.Vector, len(d.Samples))
outputs := make([]linalg.Vector, len(d.Samples))
for i, x := range d.IntensityVectors() {
inputs[i] = x
}
for i, x := range d.LabelVectors() {
outputs[i] = x
}
samples := neuralnet.VectorSampleSet(inputs, outputs)
batcher := &neuralnet.BatchRGradienter{
Learner: n.BatchLearner(),
CostFunc: neuralnet.MeanSquaredCost{},
}
crossValidation := mnist.LoadTestingDataSet()
sgd.SGDInteractive(batcher, samples, ClassifierStepSize,
ClassifierBatchSize, func() bool {
printScore("Training", n, d)
printScore("Cross", n, crossValidation)
// Stop once the interrupt handler above has closed killChan.
select {
case <-killChan:
return false
default:
return true
}
})
}
Developer: unixpickle, Project: weakai, Lines: 37, Source: main.go
Example 10: runHorizontalLineTest
func runHorizontalLineTest(name string, network neuralnet.Network) {
trainingSamples := make([]linalg.Vector, GridTrainingSize)
trainingOutputs := make([]linalg.Vector, GridTrainingSize)
for i := range trainingSamples {
trainingSamples[i] = randomBitmap()
if bitmapHasHorizontal(trainingSamples[i]) {
trainingOutputs[i] = []float64{1}
} else {
trainingOutputs[i] = []float64{0}
}
}
samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs)
network.Randomize()
batcher := &neuralnet.SingleRGradienter{
Learner: network,
CostFunc: neuralnet.MeanSquaredCost{},
}
sgd.SGD(batcher, samples, 0.1, 1000, 100)
var trainingError float64
var maxTrainingError float64
for i, sample := range trainingSamples {
result := network.Apply(&autofunc.Variable{sample})
output := result.Output()[0]
amountError := math.Abs(output - trainingOutputs[i][0])
trainingError += amountError
maxTrainingError += 1.0
}
var totalError float64
var maxPossibleError float64
for i := 0; i < 50; i++ {
sample := randomBitmap()
var expected float64
if bitmapHasHorizontal(sample) {
expected = 1
}
result := network.Apply(&autofunc.Variable{sample})
output := result.Output()[0]
amountError := math.Abs(output - expected)
totalError += amountError
maxPossibleError += 1.0
}
fmt.Printf("%s() training error: %f; cross error: %f\n", name,
trainingError/maxTrainingError, totalError/maxPossibleError)
}
Developer: unixpickle, Project: weakai, Lines: 48, Source: main.go
Example 11: TestStateOutBlock
func TestStateOutBlock(t *testing.T) {
net := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: 8,
OutputCount: 4,
},
&neuralnet.HyperbolicTangent{},
}
net.Randomize()
startVar := &autofunc.Variable{Vector: []float64{0.3, -0.3, 0.2, 0.5}}
block := &rnn.StateOutBlock{
Block: &rnn.BatcherBlock{
B: net.BatchLearner(),
StateSize: 4,
Start: startVar,
},
}
learner := append(stateOutBlockLearner{startVar}, net.Parameters()...)
NewChecker4In(block, learner).FullCheck(t)
}
Developer: unixpickle, Project: weakai, Lines: 20, Source: state_out_block_test.go
Example 12: TrainCmd
func TrainCmd(netPath, dirPath string) {
log.Println("Loading samples...")
images, width, height, err := LoadTrainingImages(dirPath)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
log.Println("Creating network...")
var network neuralnet.Network
networkData, err := ioutil.ReadFile(netPath)
if err == nil {
network, err = neuralnet.DeserializeNetwork(networkData)
if err != nil {
fmt.Fprintln(os.Stderr, "Failed to load network:", err)
os.Exit(1)
}
log.Println("Loaded network from file.")
} else {
mean, stddev := sampleStatistics(images)
convLayer := &neuralnet.ConvLayer{
FilterCount: FilterCount,
FilterWidth: 4,
FilterHeight: 4,
Stride: 2,
InputWidth: width,
InputHeight: height,
InputDepth: ImageDepth,
}
maxLayer := &neuralnet.MaxPoolingLayer{
XSpan: 3,
YSpan: 3,
InputWidth: convLayer.OutputWidth(),
InputHeight: convLayer.OutputHeight(),
InputDepth: convLayer.OutputDepth(),
}
convLayer1 := &neuralnet.ConvLayer{
FilterCount: FilterCount1,
FilterWidth: 3,
FilterHeight: 3,
Stride: 2,
InputWidth: maxLayer.OutputWidth(),
InputHeight: maxLayer.OutputHeight(),
InputDepth: maxLayer.InputDepth,
}
network = neuralnet.Network{
&neuralnet.RescaleLayer{
Bias: -mean,
Scale: 1 / stddev,
},
convLayer,
neuralnet.HyperbolicTangent{},
maxLayer,
neuralnet.HyperbolicTangent{},
convLayer1,
neuralnet.HyperbolicTangent{},
&neuralnet.DenseLayer{
InputCount: convLayer1.OutputWidth() * convLayer1.OutputHeight() *
convLayer1.OutputDepth(),
OutputCount: HiddenSize,
},
neuralnet.HyperbolicTangent{},
&neuralnet.DenseLayer{
InputCount: HiddenSize,
OutputCount: len(images),
},
&neuralnet.LogSoftmaxLayer{},
}
network.Randomize()
log.Println("Created new network.")
}
samples := neuralSamples(images)
sgd.ShuffleSampleSet(samples)
validationCount := int(ValidationFraction * float64(samples.Len()))
validationSamples := samples.Subset(0, validationCount)
trainingSamples := samples.Subset(validationCount, samples.Len())
costFunc := neuralnet.DotCost{}
gradienter := &sgd.Adam{
Gradienter: &neuralnet.BatchRGradienter{
Learner: network.BatchLearner(),
CostFunc: &neuralnet.RegularizingCost{
Variables: network.Parameters(),
Penalty: Regularization,
CostFunc: costFunc,
},
},
}
sgd.SGDInteractive(gradienter, trainingSamples, StepSize, BatchSize, func() bool {
log.Printf("Costs: validation=%d/%d cost=%f",
countCorrect(network, validationSamples), validationSamples.Len(),
neuralnet.TotalCost(costFunc, network, trainingSamples))
return true
})
//......... the rest of this function is omitted here .........
Developer: unixpickle, Project: weakai, Lines: 101, Source: train.go
Example 13: createNetwork
func createNetwork(samples sgd.SampleSet) *rnn.Bidirectional {
means := make(linalg.Vector, FeatureCount)
var count float64
for i := 0; i < samples.Len(); i++ {
inputSeq := samples.GetSample(i).(ctc.Sample).Input
for _, vec := range inputSeq {
means.Add(vec)
count++
}
}
means.Scale(-1 / count)
stddevs := make(linalg.Vector, FeatureCount)
for i := 0; i < samples.Len(); i++ {
inputSeq := samples.GetSample(i).(ctc.Sample).Input
for _, vec := range inputSeq {
for j, v := range vec {
stddevs[j] += math.Pow(v+means[j], 2)
}
}
}
stddevs.Scale(1 / count)
for i, x := range stddevs {
stddevs[i] = 1 / math.Sqrt(x)
}
outputNet := neuralnet.Network{
&neuralnet.DropoutLayer{
KeepProbability: HiddenDropout,
Training: false,
},
&neuralnet.DenseLayer{
InputCount: HiddenSize * 2,
OutputCount: OutHiddenSize,
},
&neuralnet.HyperbolicTangent{},
&neuralnet.DenseLayer{
InputCount: OutHiddenSize,
OutputCount: len(cubewhisper.Labels) + 1,
},
&neuralnet.LogSoftmaxLayer{},
}
outputNet.Randomize()
inputNet := neuralnet.Network{
&neuralnet.VecRescaleLayer{
Biases: means,
Scales: stddevs,
},
&neuralnet.GaussNoiseLayer{
Stddev: InputNoise,
Training: false,
},
}
netBlock := rnn.NewNetworkBlock(inputNet, 0)
forwardBlock := rnn.StackedBlock{
netBlock,
rnn.NewGRU(FeatureCount, HiddenSize),
}
backwardBlock := rnn.StackedBlock{
netBlock,
rnn.NewGRU(FeatureCount, HiddenSize),
}
for _, block := range []rnn.StackedBlock{forwardBlock, backwardBlock} {
for i, param := range block.Parameters() {
if i%2 == 0 {
for i := range param.Vector {
param.Vector[i] = rand.NormFloat64() * WeightStddev
}
}
}
}
return &rnn.Bidirectional{
Forward: &rnn.BlockSeqFunc{Block: forwardBlock},
Backward: &rnn.BlockSeqFunc{Block: backwardBlock},
Output: &rnn.NetworkSeqFunc{Network: outputNet},
}
}
Developer: unixpickle, Project: cubewhisper, Lines: 79, Source: train.go
Example 14: TestBaselineOutput
// TestBaselineOutput makes sure that the BatcherBlock +
// BlockSeqFunc combo produces the right output, since
// that combo will be used for the rest of the tests.
func TestBaselineOutput(t *testing.T) {
network := neuralnet.Network{
&neuralnet.DenseLayer{
InputCount: 4,
OutputCount: 6,
},
neuralnet.HyperbolicTangent{},
}
network.Randomize()
for stateSize := 0; stateSize < 4; stateSize++ {
start := &autofunc.Variable{Vector: make(linalg.Vector, stateSize)}
for i := range start.Vector {
start.Vector[i] = rand.NormFloat64()
}
toTest := rnn.BlockSeqFunc{
B: &rnn.BatcherBlock{
B: network.BatchLearner(),
StateSize: stateSize,
Start: start,
},
}
seqs, rv := randBaselineTestSeqs(network, 4-stateSize)
rv[start] = make(linalg.Vector, len(start.Vector))
for i := range rv[start] {
rv[start][i] = rand.NormFloat64()
}
res := toTest.ApplySeqsR(rv, seqfunc.VarRResult(rv, seqs))
actual := res.OutputSeqs()
actualR := res.ROutputSeqs()
expected, expectedR := manualNetworkSeq(rv, network, start, seqs, stateSize)
if len(expected) != len(actual) {
t.Errorf("stateSize %d: len(expected) [%d] != len(actual) [%d]", stateSize,
len(expected), len(actual))
continue
}
for i, act := range actual {
actR := actualR[i]
exp := expected[i]
expR := expectedR[i]
if len(act) != len(exp) {
t.Errorf("stateSize %d seq %d: len(act) [%d] != len(exp) [%d]",
stateSize, i, len(act), len(exp))
continue
}
for j, a := range act {
x := exp[j]
if len(a) != len(x) || x.Copy().Scale(-1).Add(a).MaxAbs() > 1e-5 {
t.Errorf("stateSize %d seq %d entry %d: expected %v got %v",
stateSize, i, j, x, a)
}
}
for j, a := range actR {
x := expR[j]
if len(a) != len(x) || x.Copy().Scale(-1).Add(a).MaxAbs() > 1e-5 {
t.Errorf("stateSize %d seq %d entry %d (R): expected %v got %v",
stateSize, i, j, x, a)
}
}
}
}
}
Developer: unixpickle, Project: weakai, Lines: 65, Source: baseline_test.go
Example 15: Autoencode
func Autoencode(images <-chan image.Image) (neuralnet.Network, error) {
firstImage := <-images
if firstImage == nil {
return nil, errors.New("no readable images")
}
width := firstImage.Bounds().Dx()
height := firstImage.Bounds().Dy()
log.Print("Reading images...")
tensors := []*neuralnet.Tensor3{ImageTensor(firstImage)}
for img := range images {
if img.Bounds().Dx() != width || img.Bounds().Dy() != height {
log.Printf("Image size %d,%d does not match %d,%d",
img.Bounds().Dx(), img.Bounds().Dy(),
width, height)
} else {
tensors = append(tensors, ImageTensor(img))
}
}
log.Print("Training network (ctrl+c to finish)...")
tensorSlices := make([]linalg.Vector, len(tensors))
for i, tensor := range tensors {
tensorSlices[i] = tensor.Data
}
samples := neuralnet.VectorSampleSet(tensorSlices, tensorSlices)
average, stddev := statisticalInfo(tensorSlices)
network := neuralnet.Network{
&neuralnet.RescaleLayer{
Bias: -average,
Scale: 1 / stddev,
},
&neuralnet.DenseLayer{
InputCount: width * height * 3,
OutputCount: HiddenSize1,
},
neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: HiddenSize1,
OutputCount: HiddenSize2,
},
neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: HiddenSize2,
OutputCount: HiddenSize1,
},
neuralnet.Sigmoid{},
&neuralnet.DenseLayer{
InputCount: HiddenSize1,
OutputCount: width * height * 3,
},
}
network.Randomize()
ui := hessfree.NewConsoleUI()
learner := &hessfree.DampingLearner{
WrappedLearner: &hessfree.NeuralNetLearner{
Layers: network,
Output: nil,
Cost: neuralnet.SigmoidCECost{},
MaxSubBatch: MaxSubBatch,
MaxConcurrency: 2,
},
DampingCoeff: 2,
UI: ui,
}
trainer := hessfree.Trainer{
Learner: learner,
Samples: samples,
BatchSize: samples.Len(),
UI: ui,
Convergence: hessfree.ConvergenceCriteria{
MinK: 5,
},
}
trainer.Train()
network = append(network, neuralnet.Sigmoid{})
return network, nil
}
Developer: unixpickle, Project: weakai, Lines: 86, Source: autoencode.go
Note: the github.com/unixpickle/weakai/neuralnet.Network examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The snippets are drawn from open-source projects contributed by their respective authors; copyright remains with those authors, and redistribution and use are governed by each project's License. Do not republish without permission.