10 #ifndef EIGEN_SPARSE_BLOCK_H 11 #define EIGEN_SPARSE_BLOCK_H 15 template<
typename XprType,
int BlockRows,
int BlockCols>
16 class BlockImpl<XprType,BlockRows,BlockCols,true,Sparse>
24 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
26 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
30 typedef typename BlockImpl::Index Index;
32 inline InnerIterator(
const BlockType& xpr, Index outer)
33 : XprType::InnerIterator(xpr.m_matrix, xpr.m_outerStart + outer), m_outer(outer)
35 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
36 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
40 class ReverseInnerIterator:
public XprType::ReverseInnerIterator
42 typedef typename BlockImpl::Index Index;
44 inline ReverseInnerIterator(
const BlockType& xpr, Index outer)
45 : XprType::ReverseInnerIterator(xpr.m_matrix, xpr.m_outerStart + outer), m_outer(outer)
47 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
48 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
53 inline BlockImpl(
const XprType& xpr,
int i)
54 : m_matrix(xpr), m_outerStart(i), m_outerSize(OuterSize)
57 inline BlockImpl(
const XprType& xpr,
int startRow,
int startCol,
int blockRows,
int blockCols)
58 : m_matrix(xpr), m_outerStart(IsRowMajor ? startRow : startCol), m_outerSize(IsRowMajor ? blockRows : blockCols)
61 inline const Scalar coeff(
int row,
int col)
const 63 return m_matrix.coeff(row + IsRowMajor ? m_outerStart : 0, col +IsRowMajor ? 0 : m_outerStart);
66 inline const Scalar coeff(
int index)
const 68 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
71 EIGEN_STRONG_INLINE Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
72 EIGEN_STRONG_INLINE Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
76 typename XprType::Nested m_matrix;
80 EIGEN_INHERIT_ASSIGNMENT_OPERATORS(
BlockImpl)
82 Index nonZeros()
const;
90 template<
typename _Scalar,
int _Options,
typename _Index,
int BlockRows,
int BlockCols>
92 :
public SparseMatrixBase<Block<SparseMatrix<_Scalar, _Options, _Index>,BlockRows,BlockCols,true> >
100 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
102 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
111 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
112 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
119 inline ReverseInnerIterator(
const BlockType& xpr, Index outer)
122 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
123 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
128 inline BlockImpl(
const SparseMatrixType& xpr,
int i)
129 : m_matrix(xpr), m_outerStart(i), m_outerSize(OuterSize)
132 inline BlockImpl(
const SparseMatrixType& xpr,
int startRow,
int startCol,
int blockRows,
int blockCols)
133 : m_matrix(xpr), m_outerStart(IsRowMajor ? startRow : startCol), m_outerSize(IsRowMajor ? blockRows : blockCols)
136 template<
typename OtherDerived>
140 _NestedMatrixType& matrix =
const_cast<_NestedMatrixType&
>(m_matrix);;
149 Index start = m_outerStart==0 ? 0 : matrix.outerIndexPtr()[m_outerStart];
150 Index end = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
151 Index block_size = end - start;
152 Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;
154 Index free_size = m_matrix.isCompressed()
155 ? Index(matrix.data().allocatedSize()) + block_size
163 std::memcpy(&newdata.value(0), &m_matrix.data().value(0), start*
sizeof(
Scalar));
164 std::memcpy(&newdata.index(0), &m_matrix.data().index(0), start*
sizeof(Index));
166 std::memcpy(&newdata.value(start), &tmp.data().value(0), nnz*
sizeof(
Scalar));
167 std::memcpy(&newdata.index(start), &tmp.data().index(0), nnz*
sizeof(Index));
169 std::memcpy(&newdata.value(start+nnz), &matrix.data().value(end), tail_size*
sizeof(
Scalar));
170 std::memcpy(&newdata.index(start+nnz), &matrix.data().index(end), tail_size*
sizeof(Index));
172 newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
174 matrix.data().swap(newdata);
179 matrix.data().resize(start + nnz + tail_size);
181 std::memmove(&matrix.data().value(start+nnz), &matrix.data().value(end), tail_size*
sizeof(
Scalar));
182 std::memmove(&matrix.data().index(start+nnz), &matrix.data().index(end), tail_size*
sizeof(Index));
184 std::memcpy(&matrix.data().value(start), &tmp.data().value(0), nnz*
sizeof(
Scalar));
185 std::memcpy(&matrix.data().index(start), &tmp.data().index(0), nnz*
sizeof(Index));
189 if(!m_matrix.isCompressed())
190 for(Index j=0; j<m_outerSize.value(); ++j)
191 matrix.innerNonZeroPtr()[m_outerStart+j] = tmp.
innerVector(j).nonZeros();
195 for(Index k=0; k<m_outerSize.value(); ++k)
197 matrix.outerIndexPtr()[m_outerStart+k] = p;
200 std::ptrdiff_t offset = nnz - block_size;
201 for(Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
203 matrix.outerIndexPtr()[k] += offset;
// Plain copy-assignment between identical block types: dispatch to the
// templated sparse-expression assignment operator defined earlier in this
// class, which rewrites the referenced range of the underlying SparseMatrix.
209 inline BlockType& operator=(
const BlockType& other)
211 return operator=<BlockType>(other);
214 inline const Scalar* valuePtr()
const 215 {
return m_matrix.valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
216 inline Scalar* valuePtr()
217 {
return m_matrix.const_cast_derived().valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
219 inline const Index* innerIndexPtr()
const 220 {
return m_matrix.innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
221 inline Index* innerIndexPtr()
222 {
return m_matrix.const_cast_derived().innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
224 inline const Index* outerIndexPtr()
const 225 {
return m_matrix.outerIndexPtr() + m_outerStart; }
226 inline Index* outerIndexPtr()
227 {
return m_matrix.const_cast_derived().outerIndexPtr() + m_outerStart; }
// Number of explicitly stored coefficients in the block.
// Compressed storage: difference of the bounding outer-index entries.
// NOTE(review): the uncompressed branch (original lines 235-238) is elided
// in this extract -- presumably it sums innerNonZeroPtr entries over the
// block's outer vectors; confirm against the full source.
229 Index nonZeros()
const 231 if(m_matrix.isCompressed())
232 return std::size_t(m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()])
233 - std::size_t(m_matrix.outerIndexPtr()[m_outerStart]);
234 else if(m_outerSize.value()==0)
240 inline Scalar& coeffRef(
int row,
int col)
242 return m_matrix.const_cast_derived().coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
245 inline const Scalar coeff(
int row,
int col)
const 247 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
250 inline const Scalar coeff(
int index)
const 252 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
// Reference to the value of the last stored coefficient of this
// vector-like block; requires at least one stored nonzero.
// Compressed: last entry of the (single) outer vector; uncompressed:
// use the per-vector nonzero count. NOTE(review): the `else` keyword
// (original line 261) is elided in this extract.
255 const Scalar& lastCoeff()
const 257 EIGEN_STATIC_ASSERT_VECTOR_ONLY(
BlockImpl);
258 eigen_assert(nonZeros()>0);
259 if(m_matrix.isCompressed())
260 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
262 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
265 EIGEN_STRONG_INLINE Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
266 EIGEN_STRONG_INLINE Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
270 typename SparseMatrixType::Nested m_matrix;
277 template<
typename _Scalar,
int _Options,
typename _Index,
int BlockRows,
int BlockCols>
279 :
public SparseMatrixBase<Block<const SparseMatrix<_Scalar, _Options, _Index>,BlockRows,BlockCols,true> >
286 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
288 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
297 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
298 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
305 inline ReverseInnerIterator(
const BlockType& xpr, Index outer)
308 inline Index row()
const {
return IsRowMajor ? m_outer : this->index(); }
309 inline Index col()
const {
return IsRowMajor ? this->index() : m_outer; }
314 inline BlockImpl(
const SparseMatrixType& xpr,
int i)
315 : m_matrix(xpr), m_outerStart(i), m_outerSize(OuterSize)
318 inline BlockImpl(
const SparseMatrixType& xpr,
int startRow,
int startCol,
int blockRows,
int blockCols)
319 : m_matrix(xpr), m_outerStart(IsRowMajor ? startRow : startCol), m_outerSize(IsRowMajor ? blockRows : blockCols)
322 inline const Scalar* valuePtr()
const 323 {
return m_matrix.valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
325 inline const Index* innerIndexPtr()
const 326 {
return m_matrix.innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
328 inline const Index* outerIndexPtr()
const 329 {
return m_matrix.outerIndexPtr() + m_outerStart; }
// Number of explicitly stored coefficients in the block.
// Compressed storage: difference of the bounding outer-index entries.
// NOTE(review): the uncompressed branch (original lines 337-340) is elided
// in this extract -- presumably it sums innerNonZeroPtr entries over the
// block's outer vectors; confirm against the full source.
331 Index nonZeros()
const 333 if(m_matrix.isCompressed())
334 return std::size_t(m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()])
335 - std::size_t(m_matrix.outerIndexPtr()[m_outerStart]);
336 else if(m_outerSize.value()==0)
342 inline const Scalar coeff(
int row,
int col)
const 344 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
347 inline const Scalar coeff(
int index)
const 349 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
// Reference to the value of the last stored coefficient of this
// vector-like block; requires at least one stored nonzero.
// Compressed: last entry of the (single) outer vector; uncompressed:
// use the per-vector nonzero count. NOTE(review): the `else` keyword
// (original line 358) is elided in this extract.
352 const Scalar& lastCoeff()
const 354 EIGEN_STATIC_ASSERT_VECTOR_ONLY(
BlockImpl);
355 eigen_assert(nonZeros()>0);
356 if(m_matrix.isCompressed())
357 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
359 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
362 EIGEN_STRONG_INLINE Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
363 EIGEN_STRONG_INLINE Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
367 EIGEN_INHERIT_ASSIGNMENT_OPERATORS(
BlockImpl)
369 typename SparseMatrixType::Nested m_matrix;
379 template<
typename Derived>
386 template<
typename Derived>
393 template<
typename Derived>
398 IsRowMajor ? outerStart : 0, IsRowMajor ? 0 : outerStart,
399 IsRowMajor ? outerSize : rows(), IsRowMajor ? cols() : outerSize);
406 template<
typename Derived>
411 IsRowMajor ? outerStart : 0, IsRowMajor ? 0 : outerStart,
412 IsRowMajor ? outerSize : rows(), IsRowMajor ? cols() : outerSize);
419 template<
typename XprType,
int BlockRows,
int BlockCols,
bool InnerPanel>
420 class BlockImpl<XprType,BlockRows,BlockCols,InnerPanel,Sparse>
427 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
433 m_startRow( (BlockRows==1) && (BlockCols==XprType::ColsAtCompileTime) ? i : 0),
434 m_startCol( (BlockRows==XprType::RowsAtCompileTime) && (BlockCols==1) ? i : 0),
435 m_blockRows(BlockRows==1 ? 1 : xpr.rows()),
436 m_blockCols(BlockCols==1 ? 1 : xpr.cols())
441 inline BlockImpl(
const XprType& xpr,
int startRow,
int startCol,
int blockRows,
int blockCols)
442 : m_matrix(xpr), m_startRow(startRow), m_startCol(startCol), m_blockRows(blockRows), m_blockCols(blockCols)
445 inline int rows()
const {
return m_blockRows.value(); }
446 inline int cols()
const {
return m_blockCols.value(); }
448 inline Scalar& coeffRef(
int row,
int col)
450 return m_matrix.const_cast_derived()
451 .coeffRef(row + m_startRow.value(), col + m_startCol.value());
454 inline const Scalar coeff(
int row,
int col)
const 456 return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
459 inline Scalar& coeffRef(
int index)
461 return m_matrix.const_cast_derived()
462 .coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
463 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
// Linear read access for vector blocks: a single-row block advances along
// columns, anything else advances along rows.
// NOTE(review): the `return m_matrix` prefix of this statement (original
// lines 467-468) is elided in this extract.
466 inline const Scalar coeff(
int index)
const 469 .coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
470 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
477 typedef typename _MatrixTypeNested::InnerIterator
Base;
483 : Base(block.derived().nestedExpression(), outer + (IsRowMajor ? block.m_startRow.value() : block.m_startCol.value())),
485 m_end(IsRowMajor ? block.m_startCol.value()+block.m_blockCols.value() : block.m_startRow.value()+block.m_blockRows.value())
487 while( (Base::operator
bool()) && (Base::index() < (IsRowMajor ? m_block.m_startCol.value() : m_block.m_startRow.value())) )
491 inline Index index()
const {
return Base::index() - (IsRowMajor ? m_block.m_startCol.value() : m_block.m_startRow.value()); }
492 inline Index outer()
const {
return Base::outer() - (IsRowMajor ? m_block.m_startRow.value() : m_block.m_startCol.value()); }
493 inline Index row()
const {
return Base::row() - m_block.m_startRow.value(); }
494 inline Index col()
const {
return Base::col() - m_block.m_startCol.value(); }
496 inline operator bool()
const {
return Base::operator bool() && Base::index() < m_end; }
498 class ReverseInnerIterator :
public _MatrixTypeNested::ReverseInnerIterator
500 typedef typename _MatrixTypeNested::ReverseInnerIterator
Base;
505 EIGEN_STRONG_INLINE ReverseInnerIterator(
const BlockType& block, Index outer)
506 : Base(block.derived().nestedExpression(), outer + (IsRowMajor ? block.m_startRow.value() : block.m_startCol.value())),
508 m_begin(IsRowMajor ? block.m_startCol.value() : block.m_startRow.value())
510 while( (Base::operator
bool()) && (Base::index() >= (IsRowMajor ? m_block.m_startCol.value()+block.m_blockCols.value() : m_block.m_startRow.value()+block.m_blockRows.value())) )
514 inline Index index()
const {
return Base::index() - (IsRowMajor ? m_block.m_startCol.value() : m_block.m_startRow.value()); }
515 inline Index outer()
const {
return Base::outer() - (IsRowMajor ? m_block.m_startRow.value() : m_block.m_startCol.value()); }
516 inline Index row()
const {
return Base::row() - m_block.m_startRow.value(); }
517 inline Index col()
const {
return Base::col() - m_block.m_startCol.value(); }
519 inline operator bool()
const {
return Base::operator bool() && Base::index() >= m_begin; }
523 friend class ReverseInnerIterator;
525 EIGEN_INHERIT_ASSIGNMENT_OPERATORS(
BlockImpl)
527 typename XprType::Nested m_matrix;
533 Index nonZeros()
const;
538 #endif // EIGEN_SPARSE_BLOCK_H Definition: gtest_unittest.cc:5031
A versatible sparse matrix representation.
Definition: SparseMatrix.h:85
A matrix or vector expression mapping an existing array of data.
Definition: Map.h:104
iterative scaling algorithm to equilibrate rows and column norms in matrices
Definition: TestIMU_Common.h:87
Definition: XprHelper.h:32
Index nonZeros() const
Definition: SparseMatrix.h:246
Base class of any sparse matrices or sparse expressions.
Definition: ForwardDeclarations.h:239
InnerVectorReturnType innerVector(Index outer)
Definition: SparseBlock.h:380
Definition: SparseMatrix.h:872
InnerVectorsReturnType innerVectors(Index outerStart, Index outerSize)
Definition: SparseBlock.h:395
An InnerIterator allows to loop over the element of a sparse (or dense) matrix or expression...
Expression of a fixed-size or dynamic-size block.
Definition: Block.h:103
BlockImpl(const XprType &xpr, int startRow, int startCol, int blockRows, int blockCols)
Dynamic-size constructor.
Definition: SparseBlock.h:441
Definition: SparseMatrix.h:905
Definition: ForwardDeclarations.h:17
double Scalar
Common scalar type.
Definition: FlexibleKalmanBase.h:48