Golang sgd.SampleSet Type Code Examples


This article collects typical usage examples of the SampleSet type from the Go package github.com/unixpickle/sgd. If you have been wondering what SampleSet is for, how it is used, or what real code that uses it looks like, the hand-picked examples below should help.



Eleven code examples using the SampleSet type are shown below, ordered by popularity.
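All of the examples rely on a handful of SampleSet methods. The sketch below shows the interface as it can be inferred from these usages; the actual interface in github.com/unixpickle/sgd may declare additional methods (for example Copy or Swap), so treat this as a reading aid rather than the authoritative definition.

// SampleSet, as used by the examples below: an indexable collection
// of training samples that can be sliced into sub-batches.
type SampleSet interface {
	// Len returns the number of samples in the set.
	Len() int
	// GetSample returns the sample at index i; callers type-assert the
	// result to a concrete type such as VectorSample or Sample.
	GetSample(i int) interface{}
	// Subset returns the samples in the half-open index range [start, end).
	Subset(start, end int) SampleSet
}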

Example 1: runBatch

func (b *BatchRGradienter) runBatch(rv autofunc.RVector, rgrad autofunc.RGradient,
	grad autofunc.Gradient, s sgd.SampleSet) {
	if s.Len() == 0 {
		return
	}

	sampleCount := s.Len()
	firstSample := s.GetSample(0).(VectorSample)
	inputSize := len(firstSample.Input)
	outputSize := len(firstSample.Output)
	// Pack every sample's input and output into two contiguous vectors
	// so the learner can evaluate the whole batch in a single call.
	inVec := make(linalg.Vector, sampleCount*inputSize)
	outVec := make(linalg.Vector, sampleCount*outputSize)

	for i := 0; i < s.Len(); i++ {
		sample := s.GetSample(i)
		vs := sample.(VectorSample)
		copy(inVec[i*inputSize:], vs.Input)
		copy(outVec[i*outputSize:], vs.Output)
	}

	inVar := &autofunc.Variable{Vector: inVec}
	if rgrad != nil {
		rVar := autofunc.NewRVariable(inVar, rv)
		result := b.Learner.BatchR(rv, rVar, sampleCount)
		cost := b.CostFunc.CostR(rv, outVec, result)
		cost.PropagateRGradient(linalg.Vector{1}, linalg.Vector{0},
			rgrad, grad)
	} else {
		result := b.Learner.Batch(inVar, sampleCount)
		cost := b.CostFunc.Cost(outVec, result)
		cost.PropagateGradient(linalg.Vector{1}, grad)
	}
}
Developer: unixpickle, Project: weakai, Lines: 33, Source: batch_rgradienter.go


Example 2: RGradient

func (b *SingleRGradienter) RGradient(rv autofunc.RVector, s sgd.SampleSet) (autofunc.Gradient,
	autofunc.RGradient) {
	if b.gradCache == nil {
		b.gradCache = autofunc.NewGradient(b.Learner.Parameters())
	} else {
		b.gradCache.Zero()
	}
	if b.rgradCache == nil {
		b.rgradCache = autofunc.NewRGradient(b.Learner.Parameters())
	} else {
		b.rgradCache.Zero()
	}

	for i := 0; i < s.Len(); i++ {
		sample := s.GetSample(i)
		vs := sample.(VectorSample)
		output := vs.Output
		inVar := &autofunc.Variable{Vector: vs.Input}
		rVar := autofunc.NewRVariable(inVar, rv)
		result := b.Learner.ApplyR(rv, rVar)
		cost := b.CostFunc.CostR(rv, output, result)
		cost.PropagateRGradient(linalg.Vector{1}, linalg.Vector{0},
			b.rgradCache, b.gradCache)
	}

	return b.gradCache, b.rgradCache
}
Developer: unixpickle, Project: weakai, Lines: 27, Source: single_rgradienter.go


Example 3: TotalCost

// TotalCost returns the total cost of a layer on a
// set of VectorSamples.
// The elements of s must be VectorSamples.
func TotalCost(c CostFunc, layer autofunc.Func, s sgd.SampleSet) float64 {
	var totalCost float64
	for i := 0; i < s.Len(); i++ {
		sample := s.GetSample(i)
		vs := sample.(VectorSample)
		inVar := &autofunc.Variable{Vector: vs.Input}
		result := layer.Apply(inVar)
		costOut := c.Cost(vs.Output, result)
		totalCost += costOut.Output()[0]
	}
	return totalCost
}
Developer: unixpickle, Project: weakai, Lines: 15, Source: cost_func.go
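A minimal usage sketch for TotalCost (the variable names are hypothetical, and neuralnet.MeanSquaredCost is assumed only as a placeholder; any neuralnet.CostFunc will do):

// evaluate reports the average per-sample cost of net on a held-out set.
func evaluate(net neuralnet.Network, validation sgd.SampleSet) float64 {
	total := neuralnet.TotalCost(neuralnet.MeanSquaredCost{}, net, validation)
	return total / float64(validation.Len())
}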


Example 4: subBatches

func (g *GradHelper) subBatches(s sgd.SampleSet) <-chan sgd.SampleSet {
	batchSize := g.batchSize()
	res := make(chan sgd.SampleSet, s.Len()/batchSize+1)
	for i := 0; i < s.Len(); i += batchSize {
		bs := batchSize
		if bs > s.Len()-i {
			bs = s.Len() - i
		}
		res <- s.Subset(i, i+bs)
	}
	close(res)
	return res
}
Developer: unixpickle, Project: weakai, Lines: 13, Source: grad_helper.go


Example 5: countCorrect

func countCorrect(n neuralnet.Network, s sgd.SampleSet) int {
	var count int
	for i := 0; i < s.Len(); i++ {
		sample := s.GetSample(i).(neuralnet.VectorSample)
		output := n.Apply(&autofunc.Variable{Vector: sample.Input}).Output()
		var maxIdx int
		var maxVal float64
		for j, x := range output {
			if x > maxVal || j == 0 {
				maxIdx = j
				maxVal = x
			}
		}
		if sample.Output[maxIdx] == 1 {
			count++
		}
	}
	return count
}
Developer: unixpickle, Project: weakai, Lines: 19, Source: train.go
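A short sketch of how countCorrect might be used to report accuracy (net and testSet are hypothetical and assumed to be built elsewhere in the same training program):

// Report classification accuracy on a labeled test set.
accuracy := float64(countCorrect(net, testSet)) / float64(testSet.Len())
fmt.Printf("test accuracy: %.1f%%\n", accuracy*100)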


Example 6: Gradient

func (b *SingleRGradienter) Gradient(s sgd.SampleSet) autofunc.Gradient {
	if b.gradCache == nil {
		b.gradCache = autofunc.NewGradient(b.Learner.Parameters())
	} else {
		b.gradCache.Zero()
	}

	for i := 0; i < s.Len(); i++ {
		sample := s.GetSample(i)
		vs := sample.(VectorSample)
		output := vs.Output
		inVar := &autofunc.Variable{Vector: vs.Input}
		result := b.Learner.Apply(inVar)
		cost := b.CostFunc.Cost(output, result)
		cost.PropagateGradient(linalg.Vector{1}, b.gradCache)
	}

	return b.gradCache
}
Developer: unixpickle, Project: weakai, Lines: 19, Source: single_rgradienter.go


Example 7: batch

func (g *GradHelper) batch(rv autofunc.RVector, s sgd.SampleSet) (grad autofunc.Gradient,
	rgrad autofunc.RGradient) {
	g.gradCache.variables = g.Learner.Parameters()
	if g.lastGradResult != nil {
		g.gradCache.Free(g.lastGradResult)
	}
	if g.lastRGradResult != nil {
		g.gradCache.FreeR(g.lastRGradResult)
	}
	batchSize := g.batchSize()
	maxGos := g.goroutineCount()
	if s.Len() < batchSize || maxGos < 2 {
		grad, rgrad = g.runSync(rv, s)
	} else {
		grad, rgrad = g.runAsync(rv, s)
	}
	g.lastGradResult = grad
	g.lastRGradResult = rgrad
	return
}
Developer: unixpickle, Project: weakai, Lines: 20, Source: grad_helper.go


Example 8: sampleSetSlice

// sampleSetSlice converts a sample set into a slice
// of Samples.
func sampleSetSlice(s sgd.SampleSet) []Sample {
	res := make([]Sample, s.Len())
	for i := 0; i < s.Len(); i++ {
		res[i] = s.GetSample(i).(Sample)
	}
	return res
}
Developer: unixpickle, Project: weakai, Lines: 9, Source: util.go


Example 9: TotalCostBlock

// TotalCostBlock runs an rnn.Block on a set of Samples
// and evaluates the total output cost.
//
// The batchSize specifies how many samples to run in
// batches while computing the cost.
func TotalCostBlock(b rnn.Block, batchSize int, s sgd.SampleSet, c neuralnet.CostFunc) float64 {
	runner := &rnn.Runner{Block: b}

	var cost float64
	for i := 0; i < s.Len(); i += batchSize {
		var inSeqs, outSeqs [][]linalg.Vector
		for j := i; j < i+batchSize && j < s.Len(); j++ {
			seq := s.GetSample(j).(Sample)
			inSeqs = append(inSeqs, seq.Inputs)
			outSeqs = append(outSeqs, seq.Outputs)
		}
		output := runner.RunAll(inSeqs)
		for j, outSeq := range outSeqs {
			for t, actual := range output[j] {
				expected := outSeq[t]
				actualVar := &autofunc.Variable{Vector: actual}
				cost += c.Cost(expected, actualVar).Output()[0]
			}
		}
	}
	return cost
}
Developer: unixpickle, Project: weakai, Lines: 27, Source: cost.go
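A sketch of calling TotalCostBlock to monitor validation cost between epochs (trainOneEpoch, block, trainSet, and validationSet are hypothetical; neuralnet.DotCost is assumed here as a cost function paired with a log-softmax output, so substitute whichever neuralnet.CostFunc your model actually uses):

// Print the validation cost after every training epoch.
for epoch := 0; epoch < 10; epoch++ {
	trainOneEpoch(block, trainSet) // hypothetical training step
	vc := TotalCostBlock(block, 16, validationSet, neuralnet.DotCost{})
	log.Printf("epoch %d: validation cost %f", epoch, vc)
}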


Example 10: TotalCostSeqFunc

// TotalCostSeqFunc runs a seqfunc.RFunc on a set of
// Samples and evaluates the total output cost.
//
// The batchSize specifies how many samples to run in
// batches while computing the cost.
func TotalCostSeqFunc(f seqfunc.RFunc, batchSize int, s sgd.SampleSet,
	c neuralnet.CostFunc) float64 {
	var totalCost float64
	for i := 0; i < s.Len(); i += batchSize {
		var inSeqs [][]linalg.Vector
		var outSeqs [][]linalg.Vector
		for j := i; j < i+batchSize && j < s.Len(); j++ {
			seq := s.GetSample(j).(Sample)
			inSeqs = append(inSeqs, seq.Inputs)
			outSeqs = append(outSeqs, seq.Outputs)
		}
		output := f.ApplySeqs(seqfunc.ConstResult(inSeqs))
		for j, actualSeq := range output.OutputSeqs() {
			expectedSeq := outSeqs[j]
			for k, actual := range actualSeq {
				expected := expectedSeq[k]
				actualVar := &autofunc.Variable{Vector: actual}
				totalCost += c.Cost(expected, actualVar).Output()[0]
			}
		}
	}
	return totalCost
}
Developer: unixpickle, Project: weakai, Lines: 28, Source: cost.go


Example 11: createNetwork

func createNetwork(samples sgd.SampleSet) *rnn.Bidirectional {
	means := make(linalg.Vector, FeatureCount)
	var count float64

	// Sum up the feature vectors, then negate the means so they can be
	// used directly as additive biases in the VecRescaleLayer below.
	for i := 0; i < samples.Len(); i++ {
		inputSeq := samples.GetSample(i).(ctc.Sample).Input
		for _, vec := range inputSeq {
			means.Add(vec)
			count++
		}
	}
	means.Scale(-1 / count)

	// Accumulate squared deviations (v+means[j] equals v minus the mean,
	// since means was negated above), average them into variances, and
	// take reciprocal square roots to get the rescaling factors.
	stddevs := make(linalg.Vector, FeatureCount)
	for i := 0; i < samples.Len(); i++ {
		inputSeq := samples.GetSample(i).(ctc.Sample).Input
		for _, vec := range inputSeq {
			for j, v := range vec {
				stddevs[j] += math.Pow(v+means[j], 2)
			}
		}
	}
	stddevs.Scale(1 / count)
	for i, x := range stddevs {
		stddevs[i] = 1 / math.Sqrt(x)
	}

	outputNet := neuralnet.Network{
		&neuralnet.DropoutLayer{
			KeepProbability: HiddenDropout,
			Training:        false,
		},
		&neuralnet.DenseLayer{
			InputCount:  HiddenSize * 2,
			OutputCount: OutHiddenSize,
		},
		&neuralnet.HyperbolicTangent{},
		&neuralnet.DenseLayer{
			InputCount:  OutHiddenSize,
			OutputCount: len(cubewhisper.Labels) + 1,
		},
		&neuralnet.LogSoftmaxLayer{},
	}
	outputNet.Randomize()

	inputNet := neuralnet.Network{
		&neuralnet.VecRescaleLayer{
			Biases: means,
			Scales: stddevs,
		},
		&neuralnet.GaussNoiseLayer{
			Stddev:   InputNoise,
			Training: false,
		},
	}
	netBlock := rnn.NewNetworkBlock(inputNet, 0)
	forwardBlock := rnn.StackedBlock{
		netBlock,
		rnn.NewGRU(FeatureCount, HiddenSize),
	}
	backwardBlock := rnn.StackedBlock{
		netBlock,
		rnn.NewGRU(FeatureCount, HiddenSize),
	}
	for _, block := range []rnn.StackedBlock{forwardBlock, backwardBlock} {
		for i, param := range block.Parameters() {
			if i%2 == 0 {
				for i := range param.Vector {
					param.Vector[i] = rand.NormFloat64() * WeightStddev
				}
			}
		}
	}
	return &rnn.Bidirectional{
		Forward:  &rnn.BlockSeqFunc{Block: forwardBlock},
		Backward: &rnn.BlockSeqFunc{Block: backwardBlock},
		Output:   &rnn.NetworkSeqFunc{Network: outputNet},
	}
}
Developer: unixpickle, Project: cubewhisper, Lines: 79, Source: train.go



Note: The github.com/unixpickle/sgd.SampleSet examples in this article were collected by 纯净天空 from GitHub and other source-code and documentation hosting platforms. The snippets were selected from open-source projects and remain the copyright of their original authors; consult each project's license before reusing or redistributing the code. Do not republish without permission.

