This article collects and summarizes typical usage examples of the numl.Math.LinearAlgebra.Vector class in C#. If you have been wondering what the C# Vector class is for, how to use it, or what Vector code looks like in practice, the curated class examples below may help.
The Vector class belongs to the numl.Math.LinearAlgebra namespace. A total of 20 Vector code examples are shown below, sorted by popularity by default.
Example 1: Generate
/// <summary>Generate Logistic Regression model based on a set of examples.</summary>
/// <param name="x">The Matrix to process.</param>
/// <param name="y">The Vector to process.</param>
/// <returns>Model.</returns>
public override IModel Generate(Matrix x, Vector y)
{
// create initial theta
Matrix copy = x.Copy();
copy = PreProcessing.FeatureDimensions.IncreaseDimensions(copy, PolynomialFeatures);
// add intercept term
copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);
Vector theta = Vector.Ones(copy.Cols);
var run = numl.Math.Optimization.GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new numl.Math.Functions.Cost.LogisticCostFunction(),
this.Lambda, new numl.Math.Functions.Regularization.Regularization());
LogisticRegressionModel model = new LogisticRegressionModel()
{
Descriptor = this.Descriptor,
Theta = run.Item2,
LogisticFunction = new Math.Functions.Logistic(),
PolynomialFeatures = this.PolynomialFeatures
};
return model;
}
Developer ID: m-abubakar, Project: numl, Lines of code: 29, Source: LogisticRegressionGenerator.cs
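A minimal usage sketch for this generator, mirroring the property values used in Example 19 below; the TrainLogistic helper and its sample hyper-parameters are illustrative, not part of numl:
// Hypothetical helper showing a typical call into the generator above.
public static IModel TrainLogistic(Matrix x, Vector y)
{
    var generator = new LogisticRegressionGenerator()
    {
        Lambda = 1,              // regularization constant
        LearningRate = 0.01,     // gradient descent step size
        PolynomialFeatures = 6,  // degree of polynomial feature expansion
        MaxIterations = 400
    };
    // Generate expands the features, adds the intercept term, runs gradient descent and returns the fitted model.
    return generator.Generate(x, y);
}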
Example 2: Predict
/// <summary>Predicts the most likely value for the given input vector.</summary>
/// <exception cref="InvalidOperationException">Thrown when the model is missing its root node or descriptor.</exception>
/// <param name="y">The Vector to process.</param>
/// <returns>The predicted value.</returns>
public override double Predict(Vector y)
{
if (this.Root == null || this.Descriptor == null)
{
throw new InvalidOperationException("Invalid Model - Missing information");
}
var lp = Vector.Zeros(this.Root.Probabilities.Length);
for (var i = 0; i < this.Root.Probabilities.Length; i++)
{
var stat = this.Root.Probabilities[i];
lp[i] = Math.Log(stat.Probability);
for (var j = 0; j < y.Length; j++)
{
var conditional = stat.Conditionals[j];
var p = conditional.GetStatisticFor(y[j]);
// check for missing range, assign bad probability
lp[i] += Math.Log(p == null ? 10e-10 : p.Probability);
}
}
var idx = lp.MaxIndex();
return this.Root.Probabilities[idx].X.Min;
}
Developer ID: ChewyMoon, Project: Cupcake, Lines of code: 29, Source: NaiveBayesModel.cs
Example 3: Generate
/// <summary>Generate model based on a set of examples.</summary>
/// <param name="x">The Matrix to process.</param>
/// <param name="y">The Vector to process.</param>
/// <returns>Model.</returns>
public override IModel Generate(Matrix x, Vector y)
{
// because I said so...
if (this.MaxIterations == -1)
{
this.MaxIterations = x.Rows * 1000;
}
var network = Network.Default(this.Descriptor, x, y, this.Activation);
var model = new NeuralNetworkModel { Descriptor = this.Descriptor, Network = network };
this.OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));
for (var i = 0; i < this.MaxIterations; i++)
{
var idx = i % x.Rows;
network.Forward(x[idx, VectorType.Row]);
// OnModelChanged(this, ModelEventArgs.Make(model, "Forward"));
network.Back(y[idx], this.LearningRate);
var output = string.Format("Run ({0}/{1})", i, this.MaxIterations);
this.OnModelChanged(this, ModelEventArgs.Make(model, output));
}
return model;
}
Developer ID: ChewyMoon, Project: Cupcake, Lines of code: 29, Source: NeuralNetworkGenerator.cs
Example 4: Run
/// <summary>
/// Performs gradient descent to optimise the theta parameters.
/// </summary>
/// <param name="theta">Initial Theta (Zeros)</param>
/// <param name="x">Training set</param>
/// <param name="y">Training labels</param>
/// <param name="maxIterations">Maximum number of iterations to run gradient descent</param>
/// <param name="learningRateAlpha">The learning rate (Alpha)</param>
/// <param name="costFunction">Cost function to use for gradient descent</param>
/// <param name="lambda">The regularization constant to apply</param>
/// <param name="regularizer">The regularization function to apply</param>
/// <returns>A Tuple containing the best cost found and the corresponding theta vector.</returns>
public static Tuple<double, Vector> Run(
Vector theta,
Matrix x,
Vector y,
int maxIterations,
double learningRateAlpha,
ICostFunction costFunction,
double lambda,
IRegularizer regularizer)
{
var bestTheta = theta.Copy();
var bestCost = double.PositiveInfinity;
double currentCost = 0;
var currentGradient = theta.Copy();
for (var i = 0; i <= maxIterations; i++)
{
currentCost = costFunction.ComputeCost(bestTheta, x, y, lambda, regularizer);
currentGradient = costFunction.ComputeGradient(bestTheta, x, y, lambda, regularizer);
if (currentCost < bestCost)
{
bestTheta = bestTheta - learningRateAlpha * currentGradient;
bestCost = currentCost;
}
else
{
learningRateAlpha = learningRateAlpha * 0.99;
}
}
return new Tuple<double, Vector>(bestCost, bestTheta);
}
Developer ID: ChewyMoon, Project: Cupcake, Lines of code: 46, Source: GradientDescent.cs
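A sketch of calling Run directly, assembled from the signature above and the cost/regularization types used in Example 1; the wrapper method, its default hyper-parameters, and the assumption that the caller supplies an already-prepared design matrix are all illustrative:
// Hypothetical wrapper: minimizes a logistic cost over (x, y), starting from a vector of ones.
public static Vector MinimizeLogisticCost(Matrix x, Vector y,
    int maxIterations = 400, double alpha = 0.01, double lambda = 1.0)
{
    Vector theta = Vector.Ones(x.Cols);
    var result = numl.Math.Optimization.GradientDescent.Run(
        theta, x, y, maxIterations, alpha,
        new numl.Math.Functions.Cost.LogisticCostFunction(),
        lambda, new numl.Math.Functions.Regularization.Regularization());
    // result.Item1 is the best cost found, result.Item2 the corresponding theta.
    return result.Item2;
}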
Example 5: Test_SVM_Email_Classification
//[Fact]
public void Test_SVM_Email_Classification()
{
var training_Data = System.IO.File.ReadAllLines("Data\\Emails\\Training_Data.txt")
.Select(s => s.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).ToVector(f => double.Parse(f.Trim()))).ToMatrix();
var training_Labels = System.IO.File.ReadAllLines("Data\\Emails\\Training_Labels.txt")
.Select(s => double.Parse(s.Trim())).ToVector();
var test_Data = System.IO.File.ReadAllLines("Data\\Emails\\Test_Data.txt")
.Select(s => s.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).ToVector(f => double.Parse(f.Trim()))).ToMatrix();
var test_Labels = System.IO.File.ReadAllLines("Data\\Emails\\Test_Labels.txt")
.Select(s => double.Parse(s.Trim())).ToVector();
var generator = new Supervised.SVM.SVMGenerator()
{
C = 0.1,
MaxIterations = 5,
SelectionFunction = new Supervised.SVM.Selection.RandomSetSelection()
};
var model = generator.Generate(training_Data, training_Labels);
Vector predictions = new Vector(test_Labels.Count());
for (int x = 0; x < test_Labels.Count(); x++)
{
predictions[x] = model.Predict(test_Data[x]);
}
var score = numl.Supervised.Score.ScorePredictions(predictions, test_Labels);
Console.WriteLine($"SVM Model\n: { score }");
}
Developer ID: sethjuarez, Project: numl, Lines of code: 34, Source: SVMTests.cs
Example 6: Prod
/// <summary>Computes the product of the elements of v.</summary>
/// <param name="v">The Vector whose elements are multiplied together.</param>
/// <returns>A double.</returns>
public static double Prod(Vector v)
{
var prod = v[0];
for (int i = 1; i < v.Length; i++)
prod *= v[i];
return prod;
}
Developer ID: m-abubakar, Project: numl, Lines of code: 10, Source: VectorStatics.cs
Example 7: Sum
/// <summary>Computes the sum of the elements of v.</summary>
/// <param name="v">The Vector whose elements are added together.</param>
/// <returns>A double.</returns>
public static double Sum(Vector v)
{
double sum = 0;
for (int i = 0; i < v.Length; i++)
sum += v[i];
return sum;
}
Developer ID: m-abubakar, Project: numl, Lines of code: 10, Source: VectorStatics.cs
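A short sketch exercising Examples 6 and 7. It assumes these statics are exposed on the Vector class itself (they are defined in VectorStatics.cs); if they live elsewhere in your fork, adjust the qualifier accordingly:
// Assumes Sum and Prod are static members of Vector (VectorStatics.cs).
var v = new Vector(new double[] { 2, 3, 4 });
double sum = Vector.Sum(v);    // 2 + 3 + 4 = 9
double prod = Vector.Prod(v);  // 2 * 3 * 4 = 24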
Example 8: Logistic_Regression_Test_CostFunction_1
public void Logistic_Regression_Test_CostFunction_1()
{
Matrix X = new[,]
{{ 1, 1, 1 },
{ 1, 1, 1 },
{ 1, 1, 1 },
{ 8, 1, 6 },
{ 3, 5, 7 },
{ 4, 9, 2 }};
Vector y = new Vector(new double[] { 1, 0, 1, 0, 1, 0 });
Vector theta = new Vector(new double[] { 0, 1, 0 });
ICostFunction logisticCostFunction = new LogisticCostFunction()
{
X = X,
Y = y,
Lambda = 3,
Regularizer = new L2Regularizer()
};
double cost = logisticCostFunction.ComputeCost(theta.Copy());
theta = logisticCostFunction.ComputeGradient(theta.Copy());
Assert.Equal(2.2933d, System.Math.Round(cost, 4));
Assert.Equal(1.6702d, System.Math.Round(theta[0], 4));
Assert.Equal(2.1483d, System.Math.Round(theta[1], 4));
Assert.Equal(1.0887d, System.Math.Round(theta[2], 4));
}
Developer ID: sethjuarez, Project: numl, Lines of code: 31, Source: LogisticRegressionTests.cs
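For reference, the quantity this test checks is presumably the standard L2-regularized logistic cost over the m = 6 training rows (the exact expression numl uses is not shown on this page, so treat this as the textbook formula rather than the library's code, with the intercept term conventionally excluded from the regularization sum):
J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\Big[\,y_i\log h_\theta(x_i) + (1 - y_i)\log\big(1 - h_\theta(x_i)\big)\Big] + \frac{\lambda}{2m}\sum_{j}\theta_j^{2},
\qquad h_\theta(x) = \frac{1}{1 + e^{-\theta^{\top}x}},
and the corresponding textbook gradient for the regularized components is
\frac{\partial J}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\big(h_\theta(x_i) - y_i\big)\,x_{ij} + \frac{\lambda}{m}\,\theta_j .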
Example 9: Diag
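/// <summary>Builds a square diagonal matrix from v: m[i, i] = v[i], with all off-diagonal entries zero.</summary>
/// <param name="v">The Vector supplying the diagonal values.</param>
/// <returns>A v.Length x v.Length diagonal Matrix.</returns>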
public static Matrix Diag(Vector v)
{
Matrix m = Matrix.Zeros(v.Length);
for (int i = 0; i < v.Length; i++)
m[i, i] = v[i];
return m;
}
Developer ID: budbjames, Project: numl, Lines of code: 7, Source: VectorStatics.cs
Example 10: ContainsNaN
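/// <summary>Determines whether any element of the given vector is NaN.</summary>
/// <param name="vector">The Vector to inspect.</param>
/// <returns>true if at least one element is double.NaN; otherwise false.</returns>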
public static bool ContainsNaN(Vector vector)
{
for (int i = 0; i < vector.Length; i++)
if(double.IsNaN(vector[i]))
return true;
return false;
}
Developer ID: vladtepes1473, Project: numl, Lines of code: 7, Source: VectorStatics.cs
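A brief usage sketch covering Examples 9 and 10; as with Sum and Prod, it assumes Diag and ContainsNaN are reachable as static members of Vector (VectorStatics.cs):
// Assumes both statics are exposed on the Vector class.
var v = new Vector(new double[] { 1, 2, 3 });
Matrix d = Vector.Diag(v);           // 3x3 matrix with 1, 2, 3 on the diagonal
bool hasNaN = Vector.ContainsNaN(v); // false - no element is NaN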
Example 11: SegmentedConditional
/// <summary>
/// Calculates segmented conditional impurity of y | x When stipulating segments (s), X is broken
/// up into s many segments therefore P(X=x_s) becomes a range probability rather than a fixed
/// probability. In essence the average over H(Y|X = x) becomes SUM_s [ p_s * H(Y|X = x_s) ]. The
/// values that were used to do the split are stored in the Splits member.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the requested operation is invalid.</exception>
/// <param name="y">Target impurity.</param>
/// <param name="x">Conditioned impurity.</param>
/// <param name="segments">Number of segments over x to condition upon.</param>
/// <returns>Segmented conditional impurity measure.</returns>
public double SegmentedConditional(Vector y, Vector x, int segments)
{
if (x == null || y == null)
    throw new InvalidOperationException("x or y does not exist!");
return SegmentedConditional(y, x, x.Segment(segments));
}
Developer ID: m-abubakar, Project: numl, Lines of code: 18, Source: Impurity.cs
Example 12: Conditional
/// <summary>
/// Calculates conditional impurity of y | x
/// R(Y|X) is the average of H(Y|X = x) over all possible values
/// X may take.
/// </summary>
/// <param name="y">Target impurity</param>
/// <param name="x">Conditioned impurity</param>
/// <param name="width">Split of values over x to condition upon</param>
/// <returns>Conditional impurity measure</returns>
public double Conditional(Vector y, Vector x)
{
if (x == null || y == null)
    throw new InvalidOperationException("x or y does not exist!");
double p = 0, // probability of slice
h = 0, // impurity of y | x_i : ith slice
result = 0, // aggregated sum
count = x.Count(); // total items in list
var values = x.Distinct().OrderBy(z => z); // distinct values to split on
Segments = values.Select(z => Range.Make(z, z)).ToArray();
Discrete = true;
// for each distinct value
// calculate conditional impurity
// and aggregate results
foreach (var i in values)
{
// get slice
var s = x.Indices(d => d == i);
// slice probability
p = (double)s.Count() / (double)count;
// impurity of (y | x_i)
h = Calculate(y.Slice(s));
// sum up
result += p * h;
}
return result;
}
Developer ID: budbjames, Project: numl, Lines of code: 41, Source: Impurity.cs
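A worked sketch of the method above. Impurity is abstract (Calculate is supplied by a subclass), so this assumes an Entropy implementation exists under numl.Math.Information; that class name and namespace are an assumption here, not something shown on this page:
// Assumes numl.Math.Information.Entropy derives from Impurity and implements Calculate.
var y = new Vector(new double[] { 1, 1, 0, 0 });
var x = new Vector(new double[] { 0, 0, 1, 1 });
var entropy = new numl.Math.Information.Entropy();
// x splits the data into two pure slices, so H(Y|X=0) = H(Y|X=1) = 0
// and the conditional impurity is 0.5 * 0 + 0.5 * 0 = 0.
double h = entropy.Conditional(y, x);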
Example 13: Test_Vector_Slicing_With_Indices
public void Test_Vector_Slicing_With_Indices(IEnumerable<double> source, IEnumerable<int> indices, IEnumerable<double> truth)
{
var x = new Vector(source);
var t = new Vector(truth);
var slice = x.Slice(indices);
Assert.AreEqual(t, slice);
}
Developer ID: m-abubakar, Project: numl, Lines of code: 7, Source: HelperTests.cs
Example 14: Generate
/// <summary>Generate Linear Regression model based on a set of examples.</summary>
/// <param name="x">The Matrix to process.</param>
/// <param name="y">The Vector to process.</param>
/// <returns>Model.</returns>
public override IModel Generate(Matrix x, Vector y)
{
// create initial theta
Vector theta = Vector.Ones(x.Cols + 1);
Matrix copy = x.Copy();
// normalise features
for (int i = 0; i < copy.Cols; i++)
{
var j = FeatureNormalizer.FeatureScale(copy[i, VectorType.Col]);
for (int k = 0; k < copy.Rows; k++)
{
copy[k, i] = j[k];
}
}
// add intercept term
copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);
// run gradient descent
var run = GradientDescent.Run(theta, copy, y, MaxIterations, LearningRate, new LinearCostFunction(),
Lambda, new Regularization());
// once converged create model and apply theta
LinearRegressionModel model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
{
Descriptor = Descriptor,
Theta = run.Item2
};
return model;
}
Developer ID: m-abubakar, Project: numl, Lines of code: 37, Source: LinearRegressionGenerator.cs
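A minimal sketch of driving this generator; it assumes LinearRegressionGenerator publicly exposes the MaxIterations, LearningRate and Lambda properties its Generate method reads, and the helper and its hyper-parameter values are illustrative only:
// Hypothetical helper: fits a linear regression model to existing training data.
public static IModel TrainLinear(Matrix x, Vector y)
{
    var generator = new LinearRegressionGenerator()
    {
        MaxIterations = 500,
        LearningRate = 0.01,
        Lambda = 0   // no regularization
    };
    // Generate normalizes the features, adds the intercept column and runs gradient descent.
    return generator.Generate(x, y);
}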
Example 15: Optimizer
/// <summary>
/// Initializes a new Optimizer using the default values.
/// </summary>
/// <param name="theta">Theta to optimize.</param>
/// <param name="maxIterations">Maximum number of iterations.</param>
/// <param name="learningRate">Learning Rate (alpha) (Optional).</param>
/// <param name="momentum">Momentum parameter for use in accelerated methods (Optional).</param>
/// <param name="optimizationMethod">Type of optimization method to use (Optional).</param>
/// <param name="optimizer">An external typed optimization method to use (Optional).</param>
public Optimizer(Vector theta, int maxIterations, double learningRate = 1.0, double momentum = 0.9,
OptimizationMethods optimizationMethod = OptimizationMethods.StochasticGradientDescent, OptimizationMethod optimizer = null)
{
this.Completed = false;
if (optimizationMethod != OptimizationMethods.External)
{
switch (optimizationMethod)
{
case OptimizationMethods.FastGradientDescent: optimizer = new numl.Math.Optimization.Methods.GradientDescent.FastGradientDescent() { Momentum = momentum }; break;
case OptimizationMethods.StochasticGradientDescent: optimizer = new numl.Math.Optimization.Methods.GradientDescent.StochasticGradientDescent(); break;
case OptimizationMethods.NAGDescent: optimizer = new numl.Math.Optimization.Methods.GradientDescent.NAGDescent() { Momentum = momentum }; break;
}
}
this.OpimizationMethod = optimizer;
this.Properties = new OptimizerProperties()
{
Iteration = 0,
MaxIterations = maxIterations,
Cost = double.MaxValue,
Gradient = Vector.Zeros(theta.Length),
Theta = theta,
LearningRate = learningRate,
Momentum = momentum
};
}
Developer ID: sethjuarez, Project: numl, Lines of code: 36, Source: Optimizer.cs
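A construction sketch using only the constructor signature shown above; the theta length and hyper-parameter values are arbitrary, and whatever method actually drives the optimization loop afterwards is not shown on this page, so it is not assumed here:
// Builds an optimizer over a zero-initialized theta using Nesterov accelerated gradient descent.
Vector theta = Vector.Zeros(10);
var optimizer = new Optimizer(
    theta,
    maxIterations: 200,
    learningRate: 0.1,
    momentum: 0.9,
    optimizationMethod: OptimizationMethods.NAGDescent);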
Example 16: Predict
public override double Predict(Vector y)
{
if (Normalized)
y = y / y.Norm();
return W.Dot(y) + B;
}
Developer ID: vladtepes1473, Project: numl, Lines of code: 7, Source: PerceptronModel.cs
Example 17: Calc
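/// <summary>Applies f to each element of v, passing the element's index and value, and returns the results as a new Vector; v itself is left unchanged.</summary>
/// <param name="v">The source Vector.</param>
/// <param name="f">Function mapping (index, value) to the transformed value.</param>
/// <returns>A new Vector holding the transformed values.</returns>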
public static Vector Calc(Vector v, Func<int, double, double> f)
{
var result = v.Copy();
for (int i = 0; i < v.Length; i++)
result[i] = f(i, result[i]);
return result;
}
Developer ID: budbjames, Project: numl, Lines of code: 7, Source: VectorStatics.cs
Example 18: Logistic_Regression_Test_CostFunction_2_WithoutRegularization
public void Logistic_Regression_Test_CostFunction_2_WithoutRegularization()
{
Matrix X = new[,] {
{ 8, 1, 6 },
{ 3, 5, 7 },
{ 4, 9, 2 }};
Vector y = new Vector(new double[] { 1, 1, 0 });
Vector theta = new Vector(new double[] { 0, 1, 0 });
ICostFunction logisticCostFunction = new LogisticCostFunction()
{
X = X,
Y = y,
Lambda = 0,
};
double cost = logisticCostFunction.ComputeCost(theta.Copy());
theta = logisticCostFunction.ComputeGradient(theta.Copy());
Assert.Equal(3.1067d, System.Math.Round(cost, 4));
Assert.Equal(0.6093d, System.Math.Round(theta[0], 4));
Assert.Equal(2.8988d, System.Math.Round(theta[1], 4));
Assert.Equal(0.1131d, System.Math.Round(theta[2], 4));
}
Developer ID: sethjuarez, Project: numl, Lines of code: 27, Source: LogisticRegressionTests.cs
Example 19: Save_And_Load_LogisticRegression
public void Save_And_Load_LogisticRegression()
{
Matrix m = new[,] {
{ 0.0512670, 0.6995600 },
{ -0.0927420, 0.6849400 },
{ -0.2137100, 0.6922500 },
{ -0.3750000, 0.5021900 },
{ -0.5132500, 0.4656400 },
{ -0.5247700, 0.2098000 },
{ -0.3980400, 0.0343570 },
{ -0.3058800, -0.1922500 },
{ 0.0167050, -0.4042400 },
{ 0.1319100, -0.5138900 },
{ -0.6111800, -0.0679820 },
{ -0.6630200, -0.2141800 },
{ -0.5996500, -0.4188600 },
{ -0.7263800, -0.0826020 },
{ -0.8300700, 0.3121300 },
{ -0.7206200, 0.5387400 },
{ -0.5938900, 0.4948800 },
{ -0.4844500, 0.9992700 },
{ -0.0063364, 0.9992700 },
{ 0.6326500, -0.0306120 },
};
Vector y = new Vector(new double[] {
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
});
var generator = new LogisticRegressionGenerator() { Lambda = 1, LearningRate = 0.01, PolynomialFeatures = 6, MaxIterations = 400 };
var model = generator.Generate(m, y) as LogisticRegressionModel;
Serialize(model);
var lmodel = Deserialize<LogisticRegressionModel>();
Assert.AreEqual(model.Theta, lmodel.Theta);
Assert.AreEqual(model.PolynomialFeatures, lmodel.PolynomialFeatures);
Assert.AreEqual(model.LogisticFunction.GetType(), lmodel.LogisticFunction.GetType());
}
Developer ID: m-abubakar, Project: numl, Lines of code: 59, Source: RegressionSerializationTests.cs
Example 20: Default
/// <summary>Builds a default network topology for the given descriptor and training data.</summary>
/// <param name="d">The Descriptor to process.</param>
/// <param name="x">The Matrix to process.</param>
/// <param name="y">The Vector to process.</param>
/// <param name="activation">The activation.</param>
/// <returns>A Network.</returns>
public static Network Default(Descriptor d, Matrix x, Vector y, IFunction activation)
{
var nn = new Network();
// set the output layer size to the number of distinct labels,
// or 1 if there are only two choices
var distinct = y.Distinct().Count();
var output = distinct > 2 ? distinct : 1;
// identity function for bias nodes
IFunction ident = new Ident();
// set number of hidden units to (Input + Hidden) * 2/3 as basic best guess.
var hidden = (int)Math.Ceiling((decimal)(x.Cols + output) * 2m / 3m);
// creating input nodes
nn.In = new Node[x.Cols + 1];
nn.In[0] = new Node { Label = "B0", Activation = ident };
for (var i = 1; i < x.Cols + 1; i++)
{
nn.In[i] = new Node { Label = d.ColumnAt(i - 1), Activation = ident };
}
// creating hidden nodes
var h = new Node[hidden + 1];
h[0] = new Node { Label = "B1", Activation = ident };
for (var i = 1; i < hidden + 1; i++)
{
h[i] = new Node { Label = string.Format("H{0}", i), Activation = activation };
}
// creating output nodes
nn.Out = new Node[output];
for (var i = 0; i < output; i++)
{
nn.Out[i] = new Node { Label = GetLabel(i, d), Activation = activation };
}
// link input to hidden. Note: there are
// no inputs to the hidden bias node
for (var i = 1; i < h.Length; i++)
{
for (var j = 0; j < nn.In.Length; j++)
{
Edge.Create(nn.In[j], h[i]);
}
}
// link from hidden to output (full)
for (var i = 0; i < nn.Out.Length; i++)
{
for (var j = 0; j < h.Length; j++)
{
Edge.Create(h[j], nn.Out[i]);
}
}
return nn;
}
Developer ID: ChewyMoon, Project: Cupcake, Lines of code: 65, Source: Network.cs
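A sketch that builds a default network as above and trains it the way Example 3 does; it assumes the Logistic function from Example 1 implements the IFunction activation interface, and the Descriptor, Matrix and Vector arguments are supplied by the caller:
// Hypothetical helper: builds a default network and runs a fixed number of online updates.
public static Network TrainDefaultNetwork(Descriptor d, Matrix x, Vector y,
    int iterations = 1000, double learningRate = 0.1)
{
    var network = Network.Default(d, x, y, new numl.Math.Functions.Logistic());
    for (var i = 0; i < iterations; i++)
    {
        var idx = i % x.Rows;                     // cycle through the training rows
        network.Forward(x[idx, VectorType.Row]);  // forward pass on one example
        network.Back(y[idx], learningRate);       // backpropagate its error
    }
    return network;
}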
Note: The numl.Math.LinearAlgebra.Vector class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other source-code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers, and copyright in the source code remains with the original authors. Consult each project's license before redistributing or using the code; do not republish without permission.