本文整理汇总了C#中HiddenMarkovModel类的典型用法代码示例。如果您正苦于以下问题:C# HiddenMarkovModel类的具体用法?C# HiddenMarkovModel怎么用?C# HiddenMarkovModel使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
HiddenMarkovModel类属于命名空间,在下文中一共展示了HiddenMarkovModel类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C#代码示例。
示例1: EstimateXi
/// <summary>
/// Estimate the xi values (joint probability of being in state i at time t
/// and state j at time t+1) from the forward-backward calculator.
/// </summary>
/// <param name="sequence">The observation sequence (must contain at least two observations).</param>
/// <param name="fbc">The forward-backward calculator holding alpha/beta values.</param>
/// <param name="hmm">The hidden Markov model.</param>
/// <returns>A [time][state i][state j] array of xi values.</returns>
public override double[][][] EstimateXi(IMLDataSet sequence,
ForwardBackwardCalculator fbc, HiddenMarkovModel hmm)
{
    // Xi is defined over pairs of consecutive observations.
    if (sequence.Count <= 1)
    {
        throw new EncogError(
            "Must have more than one observation");
    }

    int stateCount = hmm.StateCount;
    double[][][] xi = EngineArray.AllocDouble3D(
        (int)sequence.Count - 1, stateCount, stateCount);

    for (int t = 0; t < (sequence.Count - 1); t++)
    {
        // The distribution is evaluated against the observation at t+1.
        IMLDataPair nextObservation = sequence[t + 1];

        for (int i = 0; i < stateCount; i++)
        {
            for (int j = 0; j < stateCount; j++)
            {
                xi[t][i][j] = fbc.AlphaElement(t, i)
                    * hmm.TransitionProbability[i][j]
                    * hmm.StateDistributions[j].Probability(nextObservation)
                    * fbc.BetaElement(t + 1, j);
            }
        }
    }

    return xi;
}
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:30,代码来源:TrainBaumWelchScaled.cs
示例2: FixedLagSmoothing
/// <summary>
/// Construct a fixed-lag smoother over the given hidden Markov model.
/// </summary>
/// <param name="hmm">The hidden Markov model to smooth over.</param>
/// <param name="timelag">How many time steps the smoothed estimate lags behind the present.</param>
public FixedLagSmoothing(HiddenMarkovModel hmm, int timelag)
{
this.hmm = hmm;
this.timelag = timelag;
// No evidence has been observed yet.
this.evidenceFromSmoothedStepToPresent = new List<String>();
// Time starts at step 1; the forward message starts as the model's prior.
this.time = 1;
this.forwardMessage = hmm.prior();
// B starts as the transition model's unit (identity) matrix.
this.B = hmm.transitionModel().unitMatrix();
}
开发者ID:PaulMineau,项目名称:AIMA.Net,代码行数:9,代码来源:FixedLagSmoothing.cs
示例3: TrainKMeans
/// <summary>
/// Construct a KMeans trainer.
/// </summary>
/// <param name="method">The HMM to train.</param>
/// <param name="sequences">The training data, as a set of observation sequences.</param>
public TrainKMeans(HiddenMarkovModel method,
IMLSequenceSet sequences)
{
_method = method;
// Also kept as the template model used to create fresh state
// distributions (see LearnOpdf / CreateNewDistribution).
_modelHmm = method;
_states = method.StateCount;
_training = sequences;
// Initial clustering: one cluster per HMM state.
_clusters = new Clusters(_states, sequences);
_done = false;
}
开发者ID:johannsutherland,项目名称:encog-dotnet-core,代码行数:15,代码来源:TrainKMeans.cs
示例4: ViterbiCalculator
/// <summary>
/// Construct a Viterbi calculator: decode the most likely state sequence
/// for the observation sequence using negative log probabilities
/// (smaller delta values are more likely).
/// </summary>
/// <param name="oseq">The observation sequence (must not be empty).</param>
/// <param name="hmm">The hidden Markov model.</param>
public ViterbiCalculator(IMLDataSet oseq, HiddenMarkovModel hmm)
{
    if (oseq.Count < 1)
    {
        throw new EncogError("Must not have empty sequence");
    }

    this.delta = EngineArray.AllocateDouble2D((int)oseq.Count, hmm.StateCount);
    this.psy = EngineArray.AllocateInt2D((int)oseq.Count, hmm.StateCount);
    this._stateSequence = new int[oseq.Count];

    // Initialization: time step 0 from the priors and the first observation.
    for (int i = 0; i < hmm.StateCount; i++)
    {
        this.delta[0][i] = -Math.Log(hmm.GetPi(i))
            - Math.Log(hmm.StateDistributions[i].Probability(oseq[0]));
        this.psy[0][i] = 0;
    }

    // Recursion. (The original kept a separate counter that always equaled
    // the loop index; the single loop variable t is used directly here.)
    for (int t = 1; t < oseq.Count; t++)
    {
        IMLDataPair observation = oseq[t];
        for (int i = 0; i < hmm.StateCount; i++)
        {
            ComputeStep(hmm, observation, t, i);
        }
    }

    // Termination: pick the final state with the smallest accumulated
    // negative log probability.
    this.lnProbability = Double.PositiveInfinity;
    for (int i = 0; i < hmm.StateCount; i++)
    {
        double thisProbability = this.delta[oseq.Count - 1][i];
        if (this.lnProbability > thisProbability)
        {
            this.lnProbability = thisProbability;
            _stateSequence[oseq.Count - 1] = i;
        }
    }
    this.lnProbability = -this.lnProbability;

    // Backtrack through psy to recover the full state sequence.
    for (int t2 = (int)(oseq.Count - 2); t2 >= 0; t2--)
    {
        _stateSequence[t2] = this.psy[t2 + 1][_stateSequence[t2 + 1]];
    }
}
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:50,代码来源:ViterbiCalculator.cs
示例5: Distance
/// <summary>
/// Estimate the Kullback-Leibler distance from hmm1 to hmm2 by sampling
/// observation sequences from hmm1 and comparing scaled log probabilities
/// under both models.
/// </summary>
/// <param name="hmm1">The reference model (sequences are generated from it).</param>
/// <param name="hmm2">The model being compared.</param>
/// <returns>The average per-symbol log-probability difference.</returns>
public double Distance(HiddenMarkovModel hmm1,
HiddenMarkovModel hmm2)
{
    double total = 0.0;

    for (int i = 0; i < SequenceCount; i++)
    {
        // Sample a sequence of length Len from the first model.
        IMLDataSet oseq = new MarkovGenerator(hmm1)
            .ObservationSequence(Len);

        double lnP1 = new ForwardBackwardScaledCalculator(oseq, hmm1)
            .LnProbability();
        double lnP2 = new ForwardBackwardScaledCalculator(oseq, hmm2)
            .LnProbability();

        // Normalize the difference by the sequence length.
        total += (lnP1 - lnP2)/Len;
    }

    return total/SequenceCount;
}
开发者ID:johannsutherland,项目名称:encog-dotnet-core,代码行数:18,代码来源:KullbackLeiblerDistanceCalculator.cs
示例6: ForwardBackwardCalculator
/// <summary>
/// Construct the forward-backward calculator, optionally computing the
/// alpha (forward) and/or beta (backward) values for the sequence.
/// </summary>
/// <param name="oseq">The observation sequence (must not be empty).</param>
/// <param name="hmm">The hidden markov model to use.</param>
/// <param name="doAlpha">True to compute the alpha (forward) values.</param>
/// <param name="doBeta">True to compute the beta (backward) values.</param>
public ForwardBackwardCalculator(IMLDataSet oseq,
HiddenMarkovModel hmm, bool doAlpha, bool doBeta)
{
if (oseq.Count < 1)
{
throw new EncogError("Empty sequence");
}
if (doAlpha)
{
ComputeAlpha(hmm, oseq);
}
if (doBeta)
{
ComputeBeta(hmm, oseq);
}
// Probability is derived from whichever passes were requested.
ComputeProbability(oseq, hmm, doAlpha, doBeta);
}
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:27,代码来源:ForwardBackwardCalculator.cs
示例7: ForwardBackwardScaledCalculator
/// <summary>
/// Construct the scaled forward-backward calculator.
/// </summary>
/// <param name="seq">The sequence (must not be empty).</param>
/// <param name="hmm">The HMM.</param>
/// <param name="doAlpha">Should alpha be calculated.</param>
/// <param name="doBeta">Should beta be calculated.</param>
public ForwardBackwardScaledCalculator(
IMLDataSet seq, HiddenMarkovModel hmm,
bool doAlpha, bool doBeta)
{
if (seq.Count < 1)
{
throw new EncogError("Count cannot be less than one.");
}
// One scaling factor per time step.
_ctFactors = new double[seq.Count];
EngineArray.Fill(_ctFactors, 0.0);
// NOTE(review): alpha is computed unconditionally here (doAlpha is not
// checked) — the scaling factors in _ctFactors are produced during the
// alpha pass, so it cannot be skipped.
ComputeAlpha(hmm, seq);
if (doBeta)
{
ComputeBeta(hmm, seq);
}
ComputeProbability(seq, hmm, doAlpha, doBeta);
}
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:28,代码来源:ForwardBackwardScaledCalculator.cs
示例8: GenerateForwardBackwardCalculator
/// <summary>
/// Create the forward-backward calculator used by a training iteration.
/// </summary>
/// <param name="sequence">The observation sequence.</param>
/// <param name="hmm">The hidden Markov model.</param>
/// <returns>The forward-backward calculator for the sequence and model.</returns>
public abstract ForwardBackwardCalculator GenerateForwardBackwardCalculator(
IMLDataSet sequence, HiddenMarkovModel hmm);
开发者ID:johannsutherland,项目名称:encog-dotnet-core,代码行数:2,代码来源:BaseBaumWelch.cs
示例9: EstimateXi
/// <summary>
/// Estimate the xi values from a sequence and a forward-backward calculator.
/// </summary>
/// <param name="sequence">The observation sequence.</param>
/// <param name="fbc">The forward-backward calculator.</param>
/// <param name="hmm">The hidden Markov model.</param>
/// <returns>A [time][state i][state j] array of xi values.</returns>
public abstract double[][][] EstimateXi(IMLDataSet sequence,
ForwardBackwardCalculator fbc, HiddenMarkovModel hmm);
开发者ID:johannsutherland,项目名称:encog-dotnet-core,代码行数:2,代码来源:BaseBaumWelch.cs
示例10: Iteration
/// <summary>
/// Perform one Baum-Welch iteration: re-estimate transition probabilities,
/// initial probabilities (pi), and state distributions on a clone of the
/// current model, then adopt the clone.
/// NOTE(review): this listing is truncated by the source site — the
/// closing portion of the method is not shown here.
/// </summary>
public void Iteration()
{
// Re-estimate into a clone so the current model stays consistent
// while xi/gamma are computed against it.
HiddenMarkovModel nhmm;
nhmm = _method.Clone();
var allGamma = new double[_training.SequenceCount][][];
// aijNum/aijDen accumulate numerator and denominator of the
// transition-probability re-estimation.
double[][] aijNum = EngineArray.AllocateDouble2D(_method.StateCount, _method.StateCount);
var aijDen = new double[_method.StateCount];
EngineArray.Fill(aijDen, 0.0);
for (int i = 0; i < _method.StateCount; i++)
{
EngineArray.Fill(aijNum[i], 0.0);
}
// Accumulate xi and gamma over every training sequence.
int g = 0;
foreach (IMLDataSet obsSeq in _training.Sequences)
{
ForwardBackwardCalculator fbc = GenerateForwardBackwardCalculator(
obsSeq, _method);
double[][][] xi = EstimateXi(obsSeq, fbc, _method);
double[][] gamma = allGamma[g++] = EstimateGamma(xi, fbc);
for (int i = 0; i < _method.StateCount; i++)
{
for (int t = 0; t < (obsSeq.Count - 1); t++)
{
aijDen[i] += gamma[t][i];
for (int j = 0; j < _method.StateCount; j++)
{
aijNum[i][j] += xi[t][i][j];
}
}
}
}
// Re-estimate transition probabilities; if a state was never visited
// (denominator zero), keep its old transition row unchanged.
for (int i = 0; i < _method.StateCount; i++)
{
if (aijDen[i] == 0.0)
{
for (int j = 0; j < _method.StateCount; j++)
{
nhmm.TransitionProbability[i][j] =
_method.TransitionProbability[i][j];
}
}
else
{
for (int j = 0; j < _method.StateCount; j++)
{
nhmm.TransitionProbability[i][j] = aijNum[i][j]
/aijDen[i];
}
}
}
/* compute pi: average gamma at time 0 over all sequences */
for (int i = 0; i < _method.StateCount; i++)
{
nhmm.Pi[i] = 0.0;
}
for (int o = 0; o < _training.SequenceCount; o++)
{
for (int i = 0; i < _method.StateCount; i++)
{
nhmm.Pi[i] += (allGamma[o][0][i]/_training
.SequenceCount);
}
}
/* compute pdfs: refit each state distribution with normalized gamma weights */
for (int i = 0; i < _method.StateCount; i++)
{
var weights = new double[_training.Count];
double sum = 0.0;
int j = 0;
int o = 0;
foreach (IMLDataSet obsSeq in _training.Sequences)
{
for (int t = 0; t < obsSeq.Count; t++, j++)
{
sum += weights[j] = allGamma[o][t][i];
}
o++;
}
// Normalize the weights so they sum to one.
for (j--; j >= 0; j--)
{
weights[j] /= sum;
}
IStateDistribution opdf = nhmm.StateDistributions[i];
opdf.Fit(_training, weights);
}
_method = nhmm;
//......... remainder of the method omitted in this listing .........
开发者ID:johannsutherland,项目名称:encog-dotnet-core,代码行数:101,代码来源:BaseBaumWelch.cs
示例11: BaumWelchLearning
/// <summary>
/// Creates a new instance of the Baum-Welch learning algorithm.
/// </summary>
/// <param name="model">The hidden Markov model to train; also passed
/// to the base learning class.</param>
public BaumWelchLearning(HiddenMarkovModel model)
: base(model)
{
this.model = model;
}
开发者ID:atosorigin,项目名称:Kinect,代码行数:8,代码来源:BaumWelchLearning.cs
示例12: LearnOpdf
/// <summary>
/// Learn the observation distribution of each HMM state from the
/// observations currently assigned to that state's cluster.
/// </summary>
/// <param name="hmm">The HMM whose state distributions are updated.</param>
private void LearnOpdf(HiddenMarkovModel hmm)
{
    for (int state = 0; state < hmm.StateCount; state++)
    {
        ICollection<IMLDataPair> observations = _clusters.Cluster(state);

        if (observations.Count < 1)
        {
            // Empty cluster: fall back to a fresh default distribution.
            hmm.StateDistributions[state] = _modelHmm.CreateNewDistribution();
        }
        else
        {
            // Fit the existing distribution to the cluster's observations.
            var dataset = new BasicMLDataSet();
            foreach (IMLDataPair pair in observations)
            {
                dataset.Add(pair);
            }
            hmm.StateDistributions[state].Fit(dataset);
        }
    }
}
开发者ID:benw408701,项目名称:MLHCTransactionPredictor,代码行数:27,代码来源:TrainKMeans.cs
示例13: MarkovGenerator
/// <summary>
/// Construct a generator that produces observation sequences from the
/// given HMM, starting a new sequence immediately.
/// </summary>
/// <param name="hmm">The HMM to generate observations from.</param>
public MarkovGenerator(HiddenMarkovModel hmm)
{
this._hmm = hmm;
NewSequence();
}
开发者ID:firestrand,项目名称:encog-dotnet-core,代码行数:5,代码来源:MarkovGenerator.cs
示例14: horizontalEvalution
/*
 * Trains an HMM on the two directions that can form a horizontal line
 * (EAST and WEST) and evaluates the input against it. A holds the
 * state-transition probabilities, B the observation probabilities, and
 * pi the initial state distribution. Returns true when the
 * single-symbol input is recognized by the trained model.
 */
bool horizontalEvalution(int [] input)
{
    // Only single-symbol inputs are considered.
    if (input.Length != 1)
    {
        return false;
    }

    // The two training observations: one step east or one step west.
    int[][] sequences =
    {
        new int[] { EAST },
        new int[] { WEST }
    };

    // No prior transition knowledge: every transition probability is
    // zero (new double[,] is zero-initialized).
    double[,] A = new double[8, 8];

    // Each state deterministically emits its own symbol: identity matrix.
    double[,] B = new double[8, 8];
    for (int i = 0; i < 8; i++)
    {
        B[i, i] = 1;
    }

    // Start in state 2 or 3 with equal probability.
    double[] pi = { 0, 0, 0.5, 0.5, 0, 0, 0, 0 };

    HiddenMarkovModel model = new HiddenMarkovModel(A, B, pi);
    model.Learn(sequences, 0.0001);

    return model.Evaluate(input) >= 0.5;
}
开发者ID:jstasiak,项目名称:r2d2_assignment,代码行数:56,代码来源:HMMRecognizer.cs
示例15: ComputeBeta
/// <summary>
/// Compute the scaled beta (backward) values for the sequence.
/// </summary>
/// <param name="hmm">The HMM.</param>
/// <param name="oseq">The observation sequence.</param>
protected new void ComputeBeta(HiddenMarkovModel hmm, IMLDataSet oseq)
{
    int last = (int) oseq.Count - 1;
    Beta = EngineArray.AllocateDouble2D((int) oseq.Count, hmm.StateCount);

    // Initialize the final time step, scaled by its normalization factor.
    for (int state = 0; state < hmm.StateCount; state++)
    {
        Beta[last][state] = 1.0/_ctFactors[last];
    }

    // Walk backwards through time, scaling each step as it is computed.
    for (int t = last - 1; t >= 0; t--)
    {
        for (int state = 0; state < hmm.StateCount; state++)
        {
            ComputeBetaStep(hmm, oseq[t + 1], t, state);
            Beta[t][state] /= _ctFactors[t];
        }
    }
}
示例16: ComputeAlpha
/// <summary>
/// Compute the scaled alpha (forward) values for the sequence, calling
/// Scale with _ctFactors after each time step.
/// </summary>
/// <param name="hmm">The HMM.</param>
/// <param name="seq">The sequence.</param>
protected void ComputeAlpha(HiddenMarkovModel hmm,
IMLDataSet seq)
{
Alpha = EngineArray.AllocateDouble2D((int) seq.Count, hmm.StateCount);
// Initialize time step 0 for every state, then scale it.
for (int i = 0; i < hmm.StateCount; i++)
{
ComputeAlphaInit(hmm, seq[0], i);
}
Scale(_ctFactors, Alpha, 0);
// The enumerator is advanced once before the loop (consuming element 0,
// already handled above); inside the loop each MoveNext leaves Current
// at the observation for index t.
IEnumerator<IMLDataPair> seqIterator = seq.GetEnumerator();
if (seqIterator.MoveNext())
{
for (int t = 1; t < seq.Count; t++)
{
seqIterator.MoveNext();
IMLDataPair observation = seqIterator.Current;
for (int i = 0; i < hmm.StateCount; i++)
{
ComputeAlphaStep(hmm, observation, t, i);
}
Scale(_ctFactors, Alpha, t);
}
}
}
示例17: ComputeStep
/// <summary>
/// One Viterbi recursion step for state j at time t: choose the
/// predecessor state that minimizes the accumulated negative log
/// probability, then record the best delta and backpointer.
/// </summary>
/// <param name="hmm">The HMM.</param>
/// <param name="o">The observation at time t.</param>
/// <param name="t">The time step (t >= 1).</param>
/// <param name="j">The destination state.</param>
private void ComputeStep(HiddenMarkovModel hmm, IMLDataPair o,
int t, int j)
{
    double bestDelta = Double.PositiveInfinity;
    int bestState = 0;

    for (int i = 0; i < hmm.StateCount; i++)
    {
        double candidate = this.delta[t - 1][i]
            - Math.Log(hmm.TransitionProbability[i][j]);
        if (candidate < bestDelta)
        {
            bestDelta = candidate;
            bestState = i;
        }
    }

    this.delta[t][j] = bestDelta
        - Math.Log(hmm.StateDistributions[j].Probability(o));
    this.psy[t][j] = bestState;
}
开发者ID:CreativelyMe,项目名称:encog-dotnet-core,代码行数:22,代码来源:ViterbiCalculator.cs
示例18: OptimizeCluster
/// <summary>
/// Optimize the clusters: Viterbi-decode each training sequence and
/// reassign every observation to the cluster of its decoded state.
/// </summary>
/// <param name="hmm">The HMM.</param>
/// <returns>True if no cluster assignment was modified.</returns>
private bool OptimizeCluster(HiddenMarkovModel hmm)
{
    bool modified = false;
    foreach (IMLDataSet obsSeq in _training.Sequences)
    {
        var vc = new ViterbiCalculator(obsSeq, hmm);
        int[] states = vc.CopyStateSequence();
        for (int i = 0; i < states.Length; i++)
        {
            IMLDataPair o = obsSeq[i];
            // Cache the current cluster: the original looked it up twice.
            int currentCluster = _clusters.Cluster(o);
            if (currentCluster != states[i])
            {
                modified = true;
                _clusters.Remove(o, currentCluster);
                _clusters.Put(o, states[i]);
            }
        }
    }
    return !modified;
}
开发者ID:benw408701,项目名称:MLHCTransactionPredictor,代码行数:29,代码来源:TrainKMeans.cs
示例19: LearnTransition
/// <summary>
/// Learn the state-transition matrix by counting transitions between the
/// clusters of consecutive observations, then normalizing each row.
/// Rows with no observed transitions become uniform.
/// </summary>
/// <param name="hmm">The HMM whose transition matrix is updated.</param>
private void LearnTransition(HiddenMarkovModel hmm)
{
    // Reset all transition counts.
    for (int i = 0; i < hmm.StateCount; i++)
    {
        for (int j = 0; j < hmm.StateCount; j++)
        {
            hmm.TransitionProbability[i][j] = 0.0;
        }
    }

    // Count cluster-to-cluster transitions in every training sequence.
    foreach (IMLDataSet obsSeq in _training.Sequences)
    {
        // A single observation has no transitions to count.
        if (obsSeq.Count < 2)
        {
            continue;
        }

        int previousState = _clusters.Cluster(obsSeq[0]);
        for (int i = 1; i < obsSeq.Count; i++)
        {
            int currentState = _clusters.Cluster(obsSeq[i]);
            hmm.TransitionProbability[previousState][currentState] += 1.0;
            previousState = currentState;
        }
    }

    /* Normalize Aij array */
    for (int i = 0; i < hmm.StateCount; i++)
    {
        double rowSum = 0;
        for (int j = 0; j < hmm.StateCount; j++)
        {
            rowSum += hmm.TransitionProbability[i][j];
        }

        for (int j = 0; j < hmm.StateCount; j++)
        {
            if (rowSum == 0.0)
            {
                // Unvisited state: assume a uniform transition row.
                hmm.TransitionProbability[i][j] = 1.0/hmm.StateCount;
            }
            else
            {
                hmm.TransitionProbability[i][j] /= rowSum;
            }
        }
    }
}
开发者ID:benw408701,项目名称:MLHCTransactionPredictor,代码行数:58,代码来源:TrainKMeans.cs
示例20: LearnPi
/// <summary>
/// Learn Pi, the starting probabilities: count which cluster each
/// sequence's first observation falls into, then normalize by the
/// number of training items.
/// </summary>
/// <param name="hmm">The HMM whose Pi values are updated.</param>
private void LearnPi(HiddenMarkovModel hmm)
{
    // Counts are zero-initialized by the runtime.
    var counts = new double[_states];

    foreach (IMLDataSet sequence in _training.Sequences)
    {
        counts[_clusters.Cluster(sequence[0])]++;
    }

    double total = (int) _training.Count;
    for (int i = 0; i < _states; i++)
    {
        hmm.Pi[i] = counts[i]/total;
    }
}
开发者ID:benw408701,项目名称:MLHCTransactionPredictor,代码行数:23,代码来源:TrainKMeans.cs
注:本文中的HiddenMarkovModel类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论