This article compiles typical usage examples of the C++ VectorValues class. If you are unsure what VectorValues is, how to use it, or simply want working examples, the hand-picked snippets below may help.
A total of 17 VectorValues code examples are shown, sorted by popularity by default.
Example 1: TEST
/* ************************************************************************* */
TEST( ISAM, iSAM_smoother )
{
  Ordering ordering;
  for (int t = 1; t <= 7; t++) ordering += X(t);

  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // run iSAM for every factor
  GaussianISAM actual;
  for(boost::shared_ptr<GaussianFactor> factor: smoother) {
    GaussianFactorGraph factorGraph;
    factorGraph.push_back(factor);
    actual.update(factorGraph);
  }

  // Create expected Bayes Tree by solving smoother with "natural" ordering
  GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);

  // Verify sigmas in the bayes tree
  for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
    GaussianConditional::shared_ptr conditional = clique->conditional();
    EXPECT(!conditional->get_model());
  }

  // Check whether BayesTree is correct
  EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian()));

  // obtain solution
  VectorValues e; // expected solution
  for (int t = 1; t <= 7; t++) e.insert(X(t), Vector::Zero(2));
  VectorValues optimized = actual.optimize(); // actual solution
  EXPECT(assert_equal(e, optimized));
}
Developer ID: haidai, Project: gtsam, Lines of code: 35, Source file: testGaussianISAM.cpp
Example 2: TEST
/* ************************************************************************* */
TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // brute force
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();
  Vector X(6);
  X << 1, 2, 3, 4, 5, 6;
  Vector Y(6);
  Y << -450, -450, 300, 400, 2950, 3450;
  EXPECT(assert_equal(Y, AtA * X));

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(300, 400));
  expected.insert(2, Vector2(2950, 3450));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // now, do it with non-zero y
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
Developer ID: haidai, Project: gtsam, Lines of code: 29, Source file: testGaussianFactorGraph.cpp
Example 3:
VectorValues KeyInfo::x0() const {
  VectorValues result;
  BOOST_FOREACH ( const KeyInfo::value_type &item, *this ) {
    result.insert(item.first, Vector::Zero(item.second.dim()));
  }
  return result;
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 7, Source file: IterativeSolver.cpp
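The following usage sketch is not part of the collected examples; it is a minimal illustration of how x0() is typically used to seed an iterative solver with a zero estimate, assuming KeyInfo can be constructed directly from a GaussianFactorGraph as declared in gtsam's IterativeSolver.h.

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/IterativeSolver.h>

using namespace gtsam;

// Hedged sketch: assumes the KeyInfo(const GaussianFactorGraph&) constructor from IterativeSolver.h.
VectorValues zeroStartingPoint(const GaussianFactorGraph& gfg) {
  KeyInfo keyInfo(gfg);   // collect the key -> dimension bookkeeping for the graph
  return keyInfo.x0();    // a zero vector of the correct dimension for every key
}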
Example 4: gttic
/* ************************************************************************* */
VectorValues GaussianFactorGraph::optimizeGradientSearch() const
{
  gttic(GaussianFactorGraph_optimizeGradientSearch);

  gttic(Compute_Gradient);
  // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
  VectorValues grad = gradientAtZero();
  double gradientSqNorm = grad.dot(grad);
  gttoc(Compute_Gradient);

  gttic(Compute_Rg);
  // Compute R * g
  Errors Rg = *this * grad;
  gttoc(Compute_Rg);

  gttic(Compute_minimizing_step_size);
  // Compute minimizing step size
  double step = -gradientSqNorm / dot(Rg, Rg);
  gttoc(Compute_minimizing_step_size);

  gttic(Compute_point);
  // Compute steepest descent point
  grad *= step;
  gttoc(Compute_point);

  return grad;
}
Developer ID: DForger, Project: gtsam, Lines of code: 28, Source file: GaussianFactorGraph.cpp
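As a hedged usage sketch (added here, not taken from the page), the steepest-descent point returned by optimizeGradientSearch() can be checked against the error at the linearization point, mirroring the assertion in Example 11 below.

#include <cassert>
#include <gtsam/linear/GaussianFactorGraph.h>

using namespace gtsam;

void steepestDescentSketch(const GaussianFactorGraph& gfg) {
  VectorValues dx = gfg.optimizeGradientSearch();   // exact line search along the negative gradient
  double e0 = gfg.error(VectorValues::Zero(dx));    // error at the linearization point (zero step)
  double e1 = gfg.error(dx);                        // error at the steepest-descent point
  assert(e1 <= e0);                                 // the computed step should not increase the error
}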
Example 5: endParents
/* ************************************************************************* */
VectorValues GaussianConditional::solve(const VectorValues& x) const
{
  // Concatenate all vector values that correspond to parent variables
  const Vector xS = x.vector(FastVector<Key>(beginParents(), endParents()));

  // Update right-hand-side
  const Vector rhs = get_d() - get_S() * xS;

  // Solve matrix
  const Vector solution = get_R().triangularView<Eigen::Upper>().solve(rhs);

  // Check for indeterminant solution
  if (solution.hasNaN()) {
    throw IndeterminantLinearSystemException(keys().front());
  }

  // Insert solution into a VectorValues
  VectorValues result;
  DenseIndex vectorPosition = 0;
  for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
    result.insert(*frontal, solution.segment(vectorPosition, getDim(frontal)));
    vectorPosition += getDim(frontal);
  }
  return result;
}
Developer ID: haidai, Project: gtsam, Lines of code: 27, Source file: GaussianConditional.cpp
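The sketch below (added for illustration, not from the original source) shows solve() from the caller's side: the conditional is built with the same constructor pattern used in Examples 11 and 17, and the parent value is supplied through a VectorValues.

#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

void solveSketch() {
  // p(x0 | x1): R is upper-triangular, S is the parent block, d is the right-hand side
  GaussianConditional conditional(
      0, Vector2(1.0, 2.0), (Matrix(2, 2) << 3.0, 4.0, 0.0, 6.0).finished(),
      1, (Matrix(2, 2) << 7.0, 8.0, 9.0, 10.0).finished());

  VectorValues parents;
  parents.insert(1, Vector2(0.5, -0.5));              // value of the parent variable x1

  VectorValues frontal = conditional.solve(parents);  // back-substitution: R*x0 = d - S*x1
  frontal.print("x0");
}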
Example 6: TEST
/* ************************************************************************* */
TEST(VectorValues, resizeLike) {
  // insert, with out-of-order indices
  VectorValues original;
  original.insert(0, Vector_(1, 1.0));
  original.insert(1, Vector_(2, 2.0, 3.0));
  original.insert(5, Vector_(2, 6.0, 7.0));
  original.insert(2, Vector_(2, 4.0, 5.0));

  VectorValues actual(10, 3);
  actual.resizeLike(original);

  // Check dimensions
  LONGS_EQUAL(6, actual.size());
  LONGS_EQUAL(7, actual.dim());
  LONGS_EQUAL(1, actual.dim(0));
  LONGS_EQUAL(2, actual.dim(1));
  LONGS_EQUAL(2, actual.dim(2));
  LONGS_EQUAL(2, actual.dim(5));

  // Logic
  EXPECT(actual.exists(0));
  EXPECT(actual.exists(1));
  EXPECT(actual.exists(2));
  EXPECT(!actual.exists(3));
  EXPECT(!actual.exists(4));
  EXPECT(actual.exists(5));
  EXPECT(!actual.exists(6));

  // Check exceptions
  CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument);
}
Developer ID: gburachas, Project: gtsam_pcl, Lines of code: 32, Source file: testVectorValues.cpp
Example 7: TEST
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate2) {
  Matrix A01 = I_3x3;
  Vector3 b0(1.5, 1.5, 1.5);
  Vector3 s0(1.6, 1.6, 1.6);

  Matrix A10 = 2.0 * I_3x3;
  Matrix A11 = -2.0 * I_3x3;
  Vector3 b1(2.5, 2.5, 2.5);
  Vector3 s1(2.6, 2.6, 2.6);

  Matrix A21 = 3.0 * I_3x3;
  Vector3 b2(3.5, 3.5, 3.5);
  Vector3 s2(3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  Matrix93 A0, A1;
  A0 << A10, Z_3x3, Z_3x3;
  A1 << A11, A01, A21;
  Vector9 b, sigmas;
  b << b1, b0, b2;
  sigmas << s1, s0, s2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(0, A0, 1, A1, b,
      noiseModel::Diagonal::Sigmas(sigmas, true));

  // Make sure combining works
  HessianFactor hessian(gfg);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  EXPECT(
      assert_equal(jacobian.augmentedInformation(),
          hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(0);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) = //
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) = //
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));

  VectorValues v;
  v.insert(1, Vector3(1, 2, 3));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(
      assert_equal(expectedFactor->augmentedInformation(),
          actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 60, Source file: testHessianFactor.cpp
Example 8: TEST
/* ************************************************************************* */
TEST(LPSolver, LinearCost) {
  LinearCost cost(1, Vector3(2., 4., 6.));
  VectorValues x;
  x.insert(1, Vector3(1., 3., 5.));
  double error = cost.error(x);
  double expectedError = 44.0;  // inner product: 2*1 + 4*3 + 6*5 = 44
  DOUBLES_EQUAL(expectedError, error, 1e-100);
}
Developer ID: haidai, Project: gtsam, Lines of code: 9, Source file: testLPSolver.cpp
Example 9: error
/* ************************************************************************* */
double HessianFactor::error(const VectorValues& c) const {
  // error = 0.5*(f - 2*x'*g + x'*G*x)
  const double f = constantTerm();
  const double xtg = c.vector().dot(linearTerm());
  const double xGx = c.vector().transpose()
      * info_.range(0, this->size(), 0, this->size()).selfadjointView<Eigen::Upper>()
      * c.vector();
  return 0.5 * (f - 2.0 * xtg + xGx);
}
Developer ID: gburachas, Project: gtsam_pcl, Lines of code: 9, Source file: HessianFactor.cpp
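For a concrete picture of this formula, here is a hedged sketch (added, not from the page) that evaluates the error of a small HessianFactor; the constructor arguments follow Example 16 below, with .finished() added for the matrix streams, and the inserted vectors are made up for illustration.

#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

double hessianErrorSketch() {
  // Quadratic 0.5*(f - 2*x'g + x'Gx) over a 1-dim block (key 0) and a 2-dim block (key 1)
  Matrix G11 = (Matrix(1, 1) << 1).finished();
  Matrix G12 = (Matrix(1, 2) << 0, 0).finished();
  Matrix G22 = (Matrix(2, 2) << 1, 0, 0, 1).finished();
  Vector g1 = (Vector(1) << -7).finished();
  Vector g2 = (Vector(2) << -8, -9).finished();
  double f = 194;
  HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);

  VectorValues c;
  c.insert(0, (Vector(1) << 1.0).finished());
  c.insert(1, Vector2(2.0, 3.0));
  return factor.error(c);   // evaluates 0.5*(f - 2*c'g + c'Gc) as shown above
}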
Example 10: BOOST_FOREACH
/* ************************************************************************* */
VectorValues GaussianFactorGraph::gradientAtZero() const {
  // Zero-out the gradient
  VectorValues g;
  BOOST_FOREACH(const sharedFactor& factor, *this) {
    VectorValues gi = factor->gradientAtZero();
    g.addInPlace_(gi);
  }
  return g;
}
Developer ID: DForger, Project: gtsam, Lines of code: 10, Source file: GaussianFactorGraph.cpp
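A hedged cross-check (added here for illustration): combining this with Example 2's hessian() call and Example 16's observation that the gradient at zero equals -A'b, the per-key result of gradientAtZero() should flatten to the negated linear term eta, up to the key ordering used.

#include <boost/tuple/tuple.hpp>
#include <gtsam/linear/GaussianFactorGraph.h>

using namespace gtsam;

void gradientAtZeroCheckSketch(const GaussianFactorGraph& gfg, const FastVector<Key>& keys) {
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();      // dense A'A and A'b, as in Example 2

  VectorValues g = gfg.gradientAtZero();     // per-key gradient of 0.5*|Ax-b|^2 at x = 0
  Vector gDense = g.vector(keys);            // flatten in the chosen key order
  // Expectation (assuming keys match the ordering used by hessian()): gDense == -eta
}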
Example 11: TEST
/* ************************************************************************* */
TEST(GaussianBayesNet, ComputeSteepestDescentPoint) {

  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute the Hessian numerically
  Matrix hessian = numericalHessian<Vector10>(
      boost::bind(&computeError, gbn, _1), Vector10::Zero());

  // Compute the gradient numerically
  Vector gradient = numericalGradient<Vector10>(
      boost::bind(&computeError, gbn, _1), Vector10::Zero());

  // Compute the gradient using dense matrices
  Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian();
  LONGS_EQUAL(11, (long)augmentedHessian.cols());
  Vector denseMatrixGradient = -augmentedHessian.col(10).segment(0,10);
  EXPECT(assert_equal(gradient, denseMatrixGradient, 1e-5));

  // Compute the steepest descent point
  double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0);
  Vector expected = gradient * step;

  // Compute the steepest descent point with the dogleg function
  VectorValues actual = gbn.optimizeGradientSearch();

  // Check that points agree
  FastVector<Key> keys = list_of(0)(1)(2)(3)(4);
  Vector actualAsVector = actual.vector(keys);
  EXPECT(assert_equal(expected, actualAsVector, 1e-5));

  // Check that point causes a decrease in error
  double origError = GaussianFactorGraph(gbn).error(VectorValues::Zero(actual));
  double newError = GaussianFactorGraph(gbn).error(actual);
  EXPECT(newError < origError);
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 53, Source file: testGaussianBayesNet.cpp
Example 12: TEST
/* ************************************************************************* */
TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) {

  Matrix A1 = (Matrix(2, 2) <<
      2.74222, -0.0067457,
      0.0, 2.63624);
  Matrix A2 = (Matrix(2, 2) <<
      -0.0455167, -0.0443573,
      -0.0222154, -0.102489);
  Vector b = (Vector(2) << 0.0277052,
      -0.0533393);

  JacobianFactor expLinFactor(l1, A1, l2, A2, b, diag_model2);

  Values values;
  values.insert(l1, landmark1);
  values.insert(l2, landmark2);
  values.insert(x1, poseA1);
  values.insert(x2, poseA2);

  LinearContainerFactor actFactor(expLinFactor, values);
  LinearContainerFactor actFactorNolin(expLinFactor);

  EXPECT(assert_equal(actFactor, actFactor, tol));
  EXPECT(assert_inequal(actFactor, actFactorNolin, tol));
  EXPECT(assert_inequal(actFactorNolin, actFactor, tol));

  // Check contents
  Values expLinPoint;
  expLinPoint.insert(l1, landmark1);
  expLinPoint.insert(l2, landmark2);
  CHECK(actFactor.linearizationPoint());
  EXPECT(actFactor.hasLinearizationPoint());
  EXPECT(assert_equal(expLinPoint, *actFactor.linearizationPoint()));

  // Check error evaluation
  Vector delta_l1 = (Vector(2) << 1.0, 2.0);
  Vector delta_l2 = (Vector(2) << 3.0, 4.0);

  VectorValues delta = values.zeroVectors();
  delta.at(l1) = delta_l1;
  delta.at(l2) = delta_l2;
  Values noisyValues = values.retract(delta);
  double expError = expLinFactor.error(delta);
  EXPECT_DOUBLES_EQUAL(expError, actFactor.error(noisyValues), tol);
  EXPECT_DOUBLES_EQUAL(expLinFactor.error(values.zeroVectors()), actFactor.error(values), tol);

  // Check linearization with corrections for updated linearization point
  GaussianFactor::shared_ptr actLinearizationB = actFactor.linearize(noisyValues);
  Vector bprime = b - A1 * delta_l1 - A2 * delta_l2;
  JacobianFactor expLinFactor2(l1, A1, l2, A2, bprime, diag_model2);
  EXPECT(assert_equal(*expLinFactor2.clone(), *actLinearizationB, tol));
}
Developer ID: DForger, Project: gtsam, Lines of code: 53, Source file: testLinearContainerFactor.cpp
Example 13: DynamicValuesMismatched
/* ************************************************************************* */
VectorValues Values::localCoordinates(const Values& cp) const {
  if(this->size() != cp.size())
    throw DynamicValuesMismatched();
  VectorValues result;
  for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) {
    if(it1->key != it2->key)
      throw DynamicValuesMismatched(); // If keys do not match
    // Will throw a dynamic_cast exception if types do not match
    // NOTE: this is separate from localCoordinates(cp, ordering, result) due to at() vs. insert
    result.insert(it1->key, it1->value.localCoordinates_(it2->value));
  }
  return result;
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 14, Source file: Values.cpp
Example 14: TEST
/* ************************************************************************* */
TEST( SubgraphPreconditioner, planarGraph )
{
  // Check planar graph construction
  GaussianFactorGraph A;
  VectorValues xtrue;
  boost::tie(A, xtrue) = planarGraph(3);
  LONGS_EQUAL(13, A.size());
  LONGS_EQUAL(9, xtrue.size());
  DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue

  // Check that xtrue is optimal
  GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(A).eliminate();
  VectorValues actual = optimize(*R1);
  CHECK(assert_equal(xtrue, actual));
}
Developer ID: malcolmreynolds, Project: GTSAM, Lines of code: 16, Source file: testSubgraphPreconditioner.cpp
Example 15: retract
/* ************************************************************************* */
Values Values::retract(const VectorValues& delta) const
{
  Values result;

  for(const_iterator key_value = begin(); key_value != end(); ++key_value) {
    VectorValues::const_iterator vector_item = delta.find(key_value->key);
    Key key = key_value->key; // Non-const duplicate to deal with non-const insert argument
    if(vector_item != delta.end()) {
      const Vector& singleDelta = vector_item->second;
      Value* retractedValue(key_value->value.retract_(singleDelta)); // Retract
      result.values_.insert(key, retractedValue); // Add retracted result directly to result values
    } else {
      result.values_.insert(key, key_value->value.clone_()); // Add original version to result values
    }
  }

  return result;
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 19, Source file: Values.cpp
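To show how retract() pairs with localCoordinates() from Example 13, here is a hedged round-trip sketch (added, not from the original page); it assumes gtsam::Point2 values and plain integer keys.

#include <gtsam/geometry/Point2.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

void retractRoundTripSketch() {
  Values values;
  values.insert(1, Point2(0.0, 0.0));
  values.insert(2, Point2(1.0, 1.0));

  VectorValues delta;
  delta.insert(1, Vector2(0.1, -0.2));
  delta.insert(2, Vector2(0.0, 0.3));

  Values moved = values.retract(delta);                     // apply the per-key increments
  VectorValues recovered = values.localCoordinates(moved);  // should reproduce delta
  // recovered is expected to equal delta (up to round-off), closing the loop with Example 13.
}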
Example 16: TEST
/* ************************************************************************* */
TEST(HessianFactor, gradientAtZero)
{
  Matrix G11 = (Matrix(1, 1) << 1);
  Matrix G12 = (Matrix(1, 2) << 0, 0);
  Matrix G22 = (Matrix(2, 2) << 1, 0, 0, 1);
  Vector g1 = (Vector(1) << -7);
  Vector g2 = (Vector(2) << -8, -9);
  double f = 194;
  HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);

  // test gradient at zero
  VectorValues expectedG = pair_list_of<Key, Vector>(0, -g1) (1, -g2);
  Matrix A; Vector b; boost::tie(A, b) = factor.jacobian();
  FastVector<Key> keys; keys += 0, 1;
  EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
  VectorValues actualG = factor.gradientAtZero();
  EXPECT(assert_equal(expectedG, actualG));
}
Developer ID: DForger, Project: gtsam, Lines of code: 20, Source file: testHessianFactor.cpp
Example 17: TEST
/* ************************************************************************* */
TEST(DoglegOptimizer, ComputeBlend) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute steepest descent point
  VectorValues xu = gbn.optimizeGradientSearch();

  // Compute Newton's method point
  VectorValues xn = gbn.optimize();

  // The Newton's method point should be more "adventurous", i.e. larger, than the steepest descent point
  EXPECT(xu.vector().norm() < xn.vector().norm());

  // Compute blend
  double Delta = 1.5;
  VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn);
  DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10);
}
Developer ID: exoter-rover, Project: slam-gtsam, Lines of code: 35, Source file: testDoglegOptimizer.cpp
Note: The VectorValues class examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. For redistribution and use, please refer to each project's License; do not repost without permission.