27#include "sparse_optimizer.h"
37template <
typename Traits>
48template <
typename Traits>
56 assert(_sizePoses > 0 &&
"allocating with wrong size");
70 _coefficientsMutex.resize(numPoseBlocks);
75template <
typename Traits>
85 const uint04 size =
solver.optimizer.indexMapping().size();
86 for (
uint04 i = 0; i < size; ++i)
106 resize(scratch, sparseDim);
111 for (
uint04 i = 0; i <
solver.optimizer.indexMapping().size(); ++i)
117 PoseMatrixType* m =
_Hpp.block(poseIdx, poseIdx,
true);
125 LandmarkMatrixType* m =
_Hll.block(landmarkIdx, landmarkIdx,
true);
132 assert(poseIdx == _numPoses && landmarkIdx ==
_numLandmarks);
143 for (
uint04 i = 0; i <
solver.optimizer.activeEdges().size(); i++)
153 int indexV1Bak = ind1;
161 bool transposedBlock = ind1 > ind2;
163 std::swap(ind1, ind2);
165 PoseMatrixType* m =
_Hpp.block(ind1, ind2,
true);
170 schurMatrixLookup->
addBlock(ind1, ind2);
175 LandmarkMatrixType* m =
_Hll.block(ind1 - _numPoses, ind2 - _numPoses,
true);
207 for (
uint04 i = 0; i <
solver.optimizer.indexMapping().size(); ++i) {
212 for (
auto it1 = vedges.begin(); it1 != vedges.end(); ++it1)
214 for (
uint04 i = 0; i < (*it1)->vertexCount(); ++i)
219 for (
auto it2 = vedges.begin(); it2 != vedges.end(); ++it2) {
220 for (
uint04 j = 0; j < (*it2)->vertexCount(); ++j)
228 schurMatrixLookup->
addBlock(i1, i2);
237 _Hschur.takePatternFromHash(*schurMatrixLookup);
238 delete schurMatrixLookup;
244template <
typename Traits>
247 for (
auto vit : vset)
255 _Hpp.rowBlockIndices().add(_sizePoses);
256 _Hpp.colBlockIndices().add(_sizePoses);
257 _Hpp.blockCols().add(
typename SparseBlockMatrix<PoseMatrixType>::IntBlockMap());
260 PoseMatrixType* m =
_Hpp.block(ind, ind,
true);
271 for (
auto it = edges.begin(); it != edges.end(); ++it)
279 int indexV1Bak = ind1;
289 bool transposedBlock = ind1 > ind2;
291 std::swap(ind1, ind2);
295 PoseMatrixType* m =
_Hpp.block(ind1, ind2,
true);
318template <
typename Traits>
328 for (
uint04 i = 0; i < _numPoses; ++i)
330 PoseMatrixType *
b=
_Hpp.block(i,i);
333 b->diagonal().array() += lambda;
340 LandmarkMatrixType *
b=
_Hll.block(i,i);
343 b->diagonal().array() += lambda;
348template <
typename Traits>
353 for (
uint04 i = 0; i < _numPoses; ++i) {
354 PoseMatrixType *
b=
_Hpp.block(i,i);
358 LandmarkMatrixType *
b=
_Hll.block(i,i);
363template <
typename Traits>
void resize(IndexScratch &scratch, int totalDim)
Resizes all internal matrices and vectors for the given problem size.
Buffer< g_type > _bschur
Right-hand side for the Schur complement system.
SparseBlockMatrixCCS< PoseLandmarkMatrixType > _HplCCS
CCS view of the pose-landmark Hessian.
Buffer< g_type > _coefficients
Coefficient buffer for Schur elimination.
LinearSolverType solver
The underlying linear solver.
SparseBlockMatrix< PoseMatrixType > _Hpp
Pose-pose Hessian block.
bool _doSchur
Whether to use the Schur complement.
SparseBlockMatrix< LandmarkMatrixType > _Hll
Landmark-landmark Hessian block.
SparseBlockMatrixCCS< PoseMatrixType > _HschurTransposedCCS
Transposed CCS view of the Schur complement.
bool buildStructure(bool zeroBlocks, IndexScratch &scratch)
Builds the sparse block matrix structure from the graph.
SparseBlockMatrix< PoseLandmarkMatrixType > _Hpl
Pose-landmark Hessian block.
bool updateStructure(const Buffer< HyperGraph::HGVertex * > &vset, const Set< HyperGraph::HGVertex * > &edges)
Updates the structure after vertices or edges have changed.
SparseBlockMatrix< PoseMatrixType > _Hschur
Schur complement of the Hessian.
Buffer< PoseVectorType > _diagonalBackupPose
Backup of pose Hessian diagonal.
bool setLambda(g_type lambda, bool backup=false)
Adds a damping factor to the diagonal of the Hessian.
void restoreDiagonal()
Restores the Hessian diagonal from the backup.
uint04 _sizeLandmarks
Total scalar dimension of the landmark parameters.
BlockSolver()
Default constructor.
bool init(bool online=false)
Initializes the block solver.
SparseBlockMatrixDiagonal< LandmarkMatrixType > _DInvSchur
Inverse diagonal of the landmark Hessian.
Buffer< LandmarkVectorType > _diagonalBackupLandmark
Backup of landmark Hessian diagonal.
uint04 _numLandmarks
Number of landmark blocks.
The equivalent of std::vector but with a bit more control.
void add(t_type &&object)
Adds object to the end of the buffer.
virtual const HGVertex * vertex(uint04 i) const =0
Returns a const pointer to the i-th vertex.
virtual uint04 vertexCount() const =0
Returns the number of vertices connected by this edge.
const Buffer< HGEdge * > & edges() const
Returns the set of hyper-edges that are leaving/entering this vertex.
Base edge class for the optimizable graph, adding error computation and robust kernels.
virtual void mapHessianMemory(g_type *d, int i, int j, bool rowMajor)=0
maps the internal matrix to some external memory location; you need to provide the memory before call...
A general case Vertex for optimization.
virtual void mapHessianMemory(g_type *d)=0
maps the internal matrix to some external memory location
void setColInHessian(int c)
set the column of this vertex in the Hessian
virtual sint04 dimension() const =0
dimension of the estimated state belonging to this node
int hessianIndex() const
temporary index of this node in the parameter vector obtained from linearization
bool marginalized() const
true => this node is marginalized out during the optimization
Container that stores unique elements in no particular order, and which allows for fast retrieval or i...
g_type * b()
return b, the right hand side of the system
void resizeVector(uint04 sx)
Resizes the solution and right-hand side vectors.
Solver()
Default constructor.
Sparse matrix which uses blocks.
Sparse matrix which uses blocks on the diagonal.
Sparse matrix which uses blocks based on hash structures.
MatrixType * addBlock(int r, uint04 c, bool zeroBlock=false)
add a block to the pattern, return a pointer to the added block
const Buffer< SparseColumn > & columns() const
the block matrices per block-column
some general case utility functions
The primary namespace for the NDEVR SDK.
uint32_t uint04
-Defines an alias representing a 4 byte, unsigned integer -Can represent exact integer values 0 throu...
Scratch buffers for block index assignments during structure building.
Buffer< int > block_pose_indices
Block indices for pose vertices.
Buffer< int > block_landmark_indices
Block indices for landmark vertices.