#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

namespace Eigen {

namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index StorageIndex;
  // ...
  enum {
    // ...
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  typedef _Scalar Scalar;
  typedef _Index StorageIndex;
  // ...
  enum {
    // ...
    ColsAtCompileTime = 1,
    // ...
    MaxColsAtCompileTime = 1,
    // ...
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
  : public traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  // ...
};

} // end namespace internal
template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseCompressedBase<SparseMatrix<_Scalar, _Options, _Index> >
{
    typedef SparseCompressedBase<SparseMatrix> Base;
    using Base::convert_index;
  public:
    using Base::isCompressed;
    using Base::nonZeros;
    // ... (public interface typedefs: Scalar, StorageIndex, ...)
    using Base::operator+=;
    using Base::operator-=;

    typedef internal::CompressedStorage<Scalar,StorageIndex> Storage;
    // ...
    using Base::IsRowMajor;
    // ...

  protected:
    // ...
    Index m_outerSize;
    Index m_innerSize;
    StorageIndex* m_outerIndex;
    StorageIndex* m_innerNonZeros;   // optional; null when the matrix is in compressed mode

    Storage m_data;

  public:
    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }
    // ...
    /** \returns a const pointer to the array of values.
      * This function is aimed at interoperability with other libraries. */
    inline const Scalar* valuePtr() const { return m_data.valuePtr(); }
    /** \returns a non-const pointer to the array of values. */
    inline Scalar* valuePtr() { return m_data.valuePtr(); }

    /** \returns a const pointer to the array of inner indices. */
    inline const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
    // ...

    /** \internal */
    inline Storage& data() { return m_data; }
    /** \internal */
    inline const Storage& data() const { return m_data; }
    /** \returns the value of the matrix at position \a row, \a col.
      * This function returns Scalar(0) if the element is an explicit \em zero. */
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, StorageIndex(inner));
    }
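    // Usage sketch (illustrative, not from this header): coeff() performs a
    // binary search within the inner vector and never allocates, so it is the
    // safe way to read a possibly-zero entry.
    //
    //   Eigen::SparseMatrix<double> A(4, 4);
    //   A.insert(1, 2) = 3.5;
    //   double x = A.coeff(1, 2);  // 3.5
    //   double y = A.coeff(0, 0);  // 0.0 -- no entry is created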
    /** \returns a non-const reference to the value of the matrix at position \a row, \a col.
      * If the element does not exist then it is inserted via the insert(Index,Index) function. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      if(end<=start)
        return insert(row,col);
      const Index p = m_data.searchLowerIndex(start,end-1,StorageIndex(inner));
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }
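    // Usage sketch (illustrative, not from this header): unlike coeff(),
    // coeffRef() inserts an explicit zero when (row,col) is not stored yet,
    // so it can change the sparsity pattern.
    //
    //   Eigen::SparseMatrix<double> A(4, 4);
    //   A.coeffRef(2, 3) += 1.0;  // creates the entry, then adds 1.0
    //   A.coeffRef(2, 3) += 1.0;  // entry exists: plain lookup, value is 2.0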
    /** Removes all non zeros but keeps the allocated memory. */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));
    }
    /** Preallocates \a reserveSize non zeros.
      * Precondition: the matrix must be in compressed mode. */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
      m_data.reserve(reserveSize);
    }
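    // Usage sketch (illustrative, not from this header): a single global
    // reservation in compressed mode avoids reallocations during an ordered fill.
    //
    //   Eigen::SparseMatrix<double> A(1000, 1000);
    //   A.reserve(5000);  // room for 5000 nonzeros in total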
    #ifdef EIGEN_PARSED_BY_DOXYGEN
    /** Preallocates \a reserveSizes[\c j] non zeros for each column (resp. row) \c j.
      * This function turns the matrix into non compressed mode. */
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
    #else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif =
    #if (!EIGEN_COMP_MSVC) || (EIGEN_COMP_MSVC>=1500) // MSVC 2005 fails to compile with this typename
        typename
    #endif
        SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    #endif // EIGEN_PARSED_BY_DOXYGEN

  protected:
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        Index totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();

        // temporarily use m_innerNonZeros to hold the new starting points
        StorageIndex* newOuterIndex = m_innerNonZeros;

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        StorageIndex previousOuterIndex = m_outerIndex[m_outerSize];
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          StorageIndex innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(Index i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
      else
      {
        StorageIndex* newOuterIndex = static_cast<StorageIndex*>(std::malloc((m_outerSize+1)*sizeof(StorageIndex)));
        if (!newOuterIndex) internal::throw_std_bad_alloc();

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          StorageIndex alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          StorageIndex toReserve = std::max<StorageIndex>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          Index offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            StorageIndex innerNNZ = m_innerNonZeros[j];
            for(Index i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        std::free(newOuterIndex);
      }
    }
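    // Usage sketch (illustrative, not from this header): any vector-like type
    // with operator[] works as SizesType; VectorXi is the usual choice.
    //
    //   Eigen::SparseMatrix<double> A(100, 100);
    //   A.reserve(Eigen::VectorXi::Constant(A.cols(), 6)); // ~6 nonzeros per column
    //   A.insert(3, 7) = 1.0;  // fits into the reserved room, no reallocation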
  public:

    //--- low level purely coherent filling ---

    /** \internal
      * \sa insert(Index,Index) */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \internal
      * \sa insertBack, startVec */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(Index(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }

    /** \internal
      * \warning use it only if you know what you are doing */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }

    /** \internal
      * \sa insertBack, insertBackByOuterInner */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==Index(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }
    /** \internal
      * Must be called after inserting a set of non zero entries using the low level compressed API. */
    inline void finalize()
    {
      if(isCompressed())
      {
        StorageIndex size = internal::convert_index<StorageIndex>(m_data.size());
        Index i = m_outerSize;
        // find the last filled inner vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
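    // Usage sketch of the low-level compressed API (illustrative, not from
    // this header): inner vectors must be started sequentially and inner
    // indices must be strictly increasing within each of them.
    //
    //   Eigen::SparseMatrix<double> m(3, 3);  // column-major by default
    //   m.reserve(3);
    //   for (int j = 0; j < 3; ++j) {
    //     m.startVec(j);              // begin column j
    //     m.insertBack(j, j) = 1.0;   // append on the diagonal
    //   }
    //   m.finalize();                 // seal the trailing outer pointers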
    //---

    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    template<typename InputIterators, typename DupFunctor>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func);

    // ...

    template<typename DupFunctor>
    void collapseDuplicates(DupFunctor dup_func = DupFunctor());
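    // Usage sketch (illustrative, not from this header): the triplet list may
    // be unsorted and may contain duplicates; by default duplicates are summed.
    //
    //   std::vector<Eigen::Triplet<double> > triplets;
    //   triplets.push_back(Eigen::Triplet<double>(0, 0, 1.0));
    //   triplets.push_back(Eigen::Triplet<double>(0, 0, 2.0)); // duplicate
    //   Eigen::SparseMatrix<double> A(4, 4);
    //   A.setFromTriplets(triplets.begin(), triplets.end());   // A.coeff(0,0) == 3.0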
    /** \internal same as insert(Index,Index) except that the indices are given relative to the storage order */
    Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }
    /** Turns the matrix into the \em compressed format. */
    void makeCompressed()
    {
      if(isCompressed())
        return;

      eigen_internal_assert(m_outerIndex!=0 && m_outerSize>0);

      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        Index offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
      m_data.squeeze();
    }
    /** Turns the matrix into the uncompressed mode. */
    void uncompress()
    {
      if(m_innerNonZeros != 0)
        return;
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      for (Index i = 0; i < m_outerSize; i++)
      {
        m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }
    }
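    // Usage sketch (illustrative, not from this header): random insertions
    // leave the matrix uncompressed; most sparse algorithms expect the
    // compressed (CCS/CRS) layout, so call makeCompressed() when filling is done.
    //
    //   Eigen::SparseMatrix<double> A(10, 10);
    //   A.insert(4, 2) = 1.0;       // A is now in uncompressed mode
    //   A.makeCompressed();         // squeeze out the reserved holes
    //   bool c = A.isCompressed();  // true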
    /** Suppresses all nonzeros which are \em much \em smaller \em than \a reference under the tolerance \a epsilon. */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }
    /** Turns the matrix into compressed format, and suppresses all nonzeros which do not satisfy the predicate \a keep.
      * The functor type \a KeepFunc must implement:
      * \code bool operator() (const Index& row, const Index& col, const Scalar& value) const; \endcode */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      // TODO optimize the uncompressed mode to avoid moving and allocating the data twice
      makeCompressed();

      StorageIndex k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
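    // Usage sketch (illustrative, not from this header; `A` as above):
    //
    //   A.prune(1.0, 1e-12);  // drop entries much smaller than 1.0
    //   A.prune([](const Eigen::Index&, const Eigen::Index&, const double& v)
    //           { return std::abs(v) > 1e-9; });  // explicit keep-predicate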
    /** Resizes the matrix to a \a rows x \a cols matrix leaving old values untouched.
      * If the sizes of the matrix are decreased, then the matrix is turned to \b uncompressed mode. */
    void conservativeResize(Index rows, Index cols)
    {
      // no change
      if (this->rows() == rows && this->cols() == cols) return;

      // if one dimension is null, then there is nothing to be preserved
      if(rows==0 || cols==0) return resize(rows,cols);

      Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
      Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
      StorageIndex newInnerSize = convert_index(IsRowMajor ? cols : rows);

      // deal with the inner non zeros
      if (m_innerNonZeros)
      {
        // resize m_innerNonZeros
        StorageIndex *newInnerNonZeros = static_cast<StorageIndex*>(std::realloc(m_innerNonZeros, (m_outerSize + outerChange) * sizeof(StorageIndex)));
        if (!newInnerNonZeros) internal::throw_std_bad_alloc();
        m_innerNonZeros = newInnerNonZeros;

        for(Index i=m_outerSize; i<m_outerSize+outerChange; i++)
          m_innerNonZeros[i] = 0;
      }
      else if (innerChange < 0)
      {
        // the inner size decreased: allocate a new m_innerNonZeros
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc((m_outerSize+outerChange+1) * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();
        for(Index i = 0; i < m_outerSize; i++)
          m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }

      // update m_innerNonZeros in case of a decrease of inner size
      if (m_innerNonZeros && innerChange < 0)
      {
        for(Index i = 0; i < m_outerSize + (std::min)(outerChange, Index(0)); i++)
        {
          StorageIndex &n = m_innerNonZeros[i];
          StorageIndex start = m_outerIndex[i];
          while (n > 0 && m_data.index(start+n-1) >= newInnerSize) --n;
        }
      }

      m_innerSize = newInnerSize;

      // re-allocate the outer index structure if necessary
      if (outerChange == 0)
        return;

      StorageIndex *newOuterIndex = static_cast<StorageIndex*>(std::realloc(m_outerIndex, (m_outerSize + outerChange + 1) * sizeof(StorageIndex)));
      if (!newOuterIndex) internal::throw_std_bad_alloc();
      m_outerIndex = newOuterIndex;
      if (outerChange > 0)
      {
        StorageIndex last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
        for(Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)
          m_outerIndex[i] = last;
      }
      m_outerSize += outerChange;
    }
    /** Resizes the matrix to a \a rows x \a cols matrix and initializes it to zero. */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        std::free(m_outerIndex);
        m_outerIndex = static_cast<StorageIndex*>(std::malloc((outerSize + 1) * sizeof(StorageIndex)));
        if (!m_outerIndex) internal::throw_std_bad_alloc();

        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
    }
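    // Usage sketch (illustrative, not from this header): resize() discards all
    // values, while conservativeResize() keeps the overlapping block.
    //
    //   Eigen::SparseMatrix<double> A(5, 5);
    //   A.insert(0, 0) = 1.0;
    //   A.conservativeResize(6, 6);  // A.coeff(0,0) is still 1.0
    //   A.resize(3, 3);              // now an empty 3x3 matrix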
    /** \internal */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }

    /** \returns a const expression of the diagonal coefficients. */
    const ConstDiagonalReturnType diagonal() const { return ConstDiagonalReturnType(*this); }

    /** \returns a read-write expression of the diagonal coefficients.
      * \warning the referenced diagonal coefficients must already exist. */
    DiagonalReturnType diagonal() { return DiagonalReturnType(*this); }
    /** Default constructor yielding an empty \c 0 \c x \c 0 matrix. */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(0, 0);
    }

    /** Constructs a \a rows \c x \a cols empty matrix. */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other. */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      EIGEN_STATIC_ASSERT((internal::is_same<typename OtherDerived::Scalar, Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
      check_template_parameters();
      const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
      if (needToTranspose)
        *this = other.derived();
      else
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        internal::call_assignment_no_alias(*this, other.derived());
      }
    }

    /** Constructs a sparse matrix from the sparse selfadjoint view \a other. */
    template<typename OtherDerived, unsigned int UpLo>
    inline SparseMatrix(const SparseSelfAdjointView<OtherDerived, UpLo>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      Base::operator=(other);
    }

    /** Copy constructor (it performs a deep copy). */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** \brief Copy constructor with in-place evaluation. */
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      initAssignment(other);
      other.evalTo(*this);
    }

    /** \brief Copy constructor with in-place evaluation. */
    template<typename OtherDerived>
    explicit SparseMatrix(const DiagonalBase<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Swaps the content of two sparse matrices of the same type.
      * This is a fast operation that simply swaps the underlying pointers and parameters. */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      std::swap(m_innerNonZeros, other.m_innerNonZeros);
      m_data.swap(other.m_data);
    }

    /** Sets *this to the identity matrix.
      * This function also turns the matrix into compressed mode, and drops any reserved memory. */
    inline void setIdentity()
    {
      eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
      this->m_data.resize(rows());
      // fill the index and value buffers with the identity pattern
      Eigen::Map<IndexVector>(innerIndexPtr(), rows()).setLinSpaced(0, StorageIndex(rows()-1));
      Eigen::Map<IndexVector>(outerIndexPtr(), rows()+1).setLinSpaced(0, StorageIndex(rows()));
      Eigen::Map<ScalarVector>(valuePtr(), rows()).setOnes();
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
    }
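    // Usage sketch (illustrative, not from this header):
    //
    //   Eigen::SparseMatrix<double> I(5, 5);
    //   I.setIdentity();                  // 5 stored nonzeros, compressed mode
    //   Eigen::VectorXd d = I.diagonal(); // dense vector of ones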
    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else if(this!=&other)
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        initAssignment(other);
        if(other.isCompressed())
        {
          internal::smart_copy(other.m_outerIndex, other.m_outerIndex + m_outerSize + 1, m_outerIndex);
          m_data = other.m_data;
        }
        else
        {
          Base::operator=(other);
        }
      }
      return *this;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
#endif // EIGEN_PARSED_BY_DOXYGEN

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other);
    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        if(m.isCompressed())
        {
          for (Index i=0; i<m.nonZeros(); ++i)
            s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        }
        else
        {
          for (Index i=0; i<m.outerSize(); ++i)
          {
            Index p = m.m_outerIndex[i];
            Index pe = m.m_outerIndex[i]+m.m_innerNonZeros[i];
            Index k=p;
            for (; k<pe; ++k) {
              s << "(" << m.m_data.value(k) << "," << m.m_data.index(k) << ") ";
            }
            for (; k<m.m_outerIndex[i+1]; ++k) {
              s << "(_,_) ";
            }
          }
        }
        s << std::endl;
        s << std::endl;
        s << "Outer pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i) {
          s << m.m_outerIndex[i] << " ";
        }
        s << " $" << std::endl;
        if(!m.isCompressed())
        {
          s << "Inner non zeros:\n";
          for (Index i=0; i<m.outerSize(); ++i) {
            s << m.m_innerNonZeros[i] << " ";
          }
          s << " $" << std::endl;
        }
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }
    /** Destructor */
    inline ~SparseMatrix()
    {
      std::free(m_outerIndex);
      std::free(m_innerNonZeros);
    }

    // ...

#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif

  protected:

    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
    }
    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col);

    /** \internal
      * A vector object that is equal to 0 everywhere but \a v at the position \a i */
    class SingletonVector
    {
        StorageIndex m_index;
        StorageIndex m_value;
      public:
        typedef StorageIndex value_type;
        SingletonVector(Index i, Index v)
          : m_index(convert_index(i)), m_value(convert_index(v))
        {}

        StorageIndex operator[](Index i) const { return i==m_index ? m_value : 0; }
    };

    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col);

  public:
    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_STRONG_INLINE Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(!isCompressed());
      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
      m_data.index(p) = convert_index(inner);
      return (m_data.value(p) = 0);
    }

  private:
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    struct default_prunning_func {
      default_prunning_func(const Scalar& ref, const RealScalar& eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};
namespace internal {

template<typename InputIterator, typename SparseMatrixType, typename DupFunctor>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, DupFunctor dup_func)
{
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::StorageIndex StorageIndex;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor,StorageIndex> trMat(mat.rows(),mat.cols());

  if(begin!=end)
  {
    // pass 1: count the nnz per inner-vector
    typename SparseMatrixType::IndexVector wi(trMat.outerSize());
    wi.setZero();
    for(InputIterator it(begin); it!=end; ++it)
    {
      eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
      wi(IsRowMajor ? it->col() : it->row())++;
    }

    // pass 2: insert all the elements into trMat
    trMat.reserve(wi);
    for(InputIterator it(begin); it!=end; ++it)
      trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

    // pass 3: collapse the duplicates
    trMat.collapseDuplicates(dup_func);
  }

  // pass 4: transposed copy -> implicit sorting
  mat = trMat;
}

} // end namespace internal

/** Fill the matrix \c *this with the list of \em triplets defined by the iterator range \a begin - \a end.
  * Duplicated entries are summed. */
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_Index> >(begin, end, *this, internal::scalar_sum_op<Scalar,Scalar>());
}

/** Same as setFromTriplets(const InputIterators&, const InputIterators&) except that duplicated entries are combined through \a dup_func. */
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators, typename DupFunctor>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_Index>, DupFunctor>(begin, end, *this, dup_func);
}
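// Usage sketch (illustrative, not from this header): dup_func decides how
// duplicated (i,j) entries are combined; here the last value wins. The
// triplet list `triplets` and the matrix `A` are assumed to exist as above.
//
//   A.setFromTriplets(triplets.begin(), triplets.end(),
//                     [](const double&, const double& b) { return b; });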
/** \internal */
template<typename Scalar, int _Options, typename _Index>
template<typename DupFunctor>
void SparseMatrix<Scalar,_Options,_Index>::collapseDuplicates(DupFunctor dup_func)
{
  eigen_assert(!isCompressed());
  // TODO, in practice we should be able to use m_innerNonZeros for that task
  IndexVector wi(innerSize());
  wi.fill(-1);
  StorageIndex count = 0;
  // for each inner-vector, wi(inner_index) will hold the position of first element into the index/value buffers
  for(Index j=0; j<outerSize(); ++j)
  {
    StorageIndex start = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // we already met this entry => accumulate it
        m_data.value(wi(i)) = dup_func(m_data.value(wi(i)), m_data.value(k));
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;

  // turn the matrix into compressed form
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
template<typename Scalar, int _Options, typename _Index>
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_Index>& SparseMatrix<Scalar,_Options,_Index>::operator=(const SparseMatrixBase<OtherDerived>& other)
{
  EIGEN_STATIC_ASSERT((internal::is_same<typename OtherDerived::Scalar, Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)

  #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
  #endif

  const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
  if (needToTranspose)
  {
    #ifdef EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
      EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
    #endif
    // two passes algorithm:
    //  1 - compute the number of coeffs per dest inner vector
    //  2 - do the actual copy/eval
    // Since each coeff of the rhs has to be evaluated twice, let's evaluate it if needed
    typedef typename internal::nested_eval<OtherDerived,2,typename internal::plain_matrix_type<OtherDerived>::type >::type OtherCopy;
    typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
    typedef internal::evaluator<_OtherCopy> OtherCopyEval;
    OtherCopy otherCopy(other.derived());
    OtherCopyEval otherCopyEval(otherCopy);

    SparseMatrix dest(other.rows(),other.cols());
    Eigen::Map<IndexVector> (dest.m_outerIndex,dest.outerSize()).setZero();

    // pass 1: count the number of coeffs per dest inner vector
    for (Index j=0; j<otherCopy.outerSize(); ++j)
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
        ++dest.m_outerIndex[it.index()];

    // prefix sum
    StorageIndex count = 0;
    IndexVector positions(dest.outerSize());
    for (Index j=0; j<dest.outerSize(); ++j)
    {
      StorageIndex tmp = dest.m_outerIndex[j];
      dest.m_outerIndex[j] = count;
      positions[j] = count;
      count += tmp;
    }
    dest.m_outerIndex[dest.outerSize()] = count;
    // alloc
    dest.m_data.resize(count);
    // pass 2: do the actual copy
    for (StorageIndex j=0; j<otherCopy.outerSize(); ++j)
    {
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
      {
        Index pos = positions[it.index()]++;
        dest.m_data.index(pos) = j;
        dest.m_data.value(pos) = it.value();
      }
    }
    this->swap(dest);
    return *this;
  }
  else
  {
    if(other.isRValue())
    {
      initAssignment(other.derived());
    }
    // there is no special optimization
    return Base::operator=(other.derived());
  }
}
template<typename _Scalar, int _Options, typename _Index>
typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insert(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  if(isCompressed())
  {
    if(nonZeros()==0)
    {
      // reserve space if not already done
      if(m_data.allocatedSize()==0)
        m_data.reserve(2*m_innerSize);

      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();

      memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));

      // pack all inner-vectors to the end of the pre-allocated space
      // and allocate the entire free space to the first inner-vector
      StorageIndex end = convert_index(m_data.allocatedSize());
      for(Index j=1; j<=m_outerSize; ++j)
        m_outerIndex[j] = end;
    }
    else
    {
      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();
      for(Index j=0; j<m_outerSize; ++j)
        m_innerNonZeros[j] = m_outerIndex[j+1]-m_outerIndex[j];
    }
  }

  // check whether we can do a fast "push back" insertion
  Index data_end = m_data.allocatedSize();

  // First case: we are filling a new inner vector which is packed at the end.
  // We assume that all remaining inner-vectors are also empty and packed to the end.
  if(m_outerIndex[outer]==data_end)
  {
    eigen_internal_assert(m_innerNonZeros[outer]==0);

    // pack previous empty inner-vectors to the end of the used space
    // and allocate the entire free space to the current inner-vector
    StorageIndex p = convert_index(m_data.size());
    Index j = outer;
    while(j>=0 && m_innerNonZeros[j]==0)
      m_outerIndex[j--] = p;

    // push back the new element
    ++m_innerNonZeros[outer];
    m_data.append(Scalar(0), inner);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      // m_data has been reallocated: move the remaining inner-vectors back
      // to the end of the free space so that it belongs to the current one
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }
    return m_data.value(p);
  }

  // Second case: the next inner-vector is packed to the end
  // and the current inner-vector end matches the used space.
  if(m_outerIndex[outer+1]==data_end && m_outerIndex[outer]+m_innerNonZeros[outer]==m_data.size())
  {
    eigen_internal_assert(outer+1==m_outerSize || m_innerNonZeros[outer+1]==0);

    // add the new element
    ++m_innerNonZeros[outer];
    m_data.resize(m_data.size()+1);

    // check for reallocation, as above
    if(data_end != m_data.allocatedSize())
    {
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }

    // and insert it at the right position (sorted insertion)
    Index startId = m_outerIndex[outer];
    Index p = m_outerIndex[outer]+m_innerNonZeros[outer]-1;
    while ( (p > startId) && (m_data.index(p-1) > inner) )
    {
      m_data.index(p) = m_data.index(p-1);
      m_data.value(p) = m_data.value(p-1);
      --p;
    }

    m_data.index(p) = convert_index(inner);
    return (m_data.value(p) = 0);
  }

  if(m_data.size() != m_data.allocatedSize())
  {
    // make sure the matrix is compatible to random un-compressed insertion:
    m_data.resize(m_data.allocatedSize());
    this->reserveInnerVectors(Array<StorageIndex,Dynamic,1>::Constant(m_outerSize, 2));
  }

  return insertUncompressed(row,col);
}
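// Usage sketch (illustrative, not from this header): a typical random-order
// fill; `n` and the visiting loop are placeholders. Reserving per-column room
// first keeps each insert() cheap, and makeCompressed() packs the result.
//
//   Eigen::SparseMatrix<double> A(n, n);
//   A.reserve(Eigen::VectorXi::Constant(n, 4)); // ~4 nonzeros per column
//   for (/* each (i,j,v) in arbitrary order */)
//     A.insert(i, j) = v;                       // (i,j) must not already exist
//   A.makeCompressed();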
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertUncompressed(Index row, Index col)
{
  eigen_assert(!isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const StorageIndex inner = convert_index(IsRowMajor ? col : row);

  Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  StorageIndex innerNNZ = m_innerNonZeros[outer];
  if(innerNNZ>=room)
  {
    // this inner vector is full, we need to reallocate the whole buffer :(
    reserve(SingletonVector(outer,std::max<StorageIndex>(2,innerNNZ)));
  }

  Index startId = m_outerIndex[outer];
  Index p = startId + m_innerNonZeros[outer];
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }
  eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exists, you must call coeffRef to this end");

  m_innerNonZeros[outer]++;

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertCompressed(Index row, Index col)
{
  eigen_assert(isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index previousOuter = outer;
  if (m_outerIndex[outer+1]==0)
  {
    // we start a new inner vector
    while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
    {
      m_outerIndex[previousOuter] = convert_index(m_data.size());
      --previousOuter;
    }
    m_outerIndex[outer+1] = m_outerIndex[outer];
  }

  // here we have to handle the tricky case where the outerIndex array
  // starts with: [ 0 0 0 0 0 1 ...] and we are inserting in, e.g., the 2nd inner vector...
  bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                && (size_t(m_outerIndex[outer+1]) == m_data.size());

  size_t startId = m_outerIndex[outer];
  size_t p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];

  double reallocRatio = 1;
  if (m_data.allocatedSize()<=m_data.size())
  {
    // if there is no preallocated memory, let's reserve a minimum of 32 elements
    if (m_data.size()==0)
    {
      m_data.reserve(32);
    }
    else
    {
      // we need to reallocate the data; to reduce repeated reallocations we use a
      // smart resize algorithm based on the current filling ratio, and doubles to
      // avoid integer overflows
      double nnzEstimate = double(m_outerIndex[outer])*double(m_outerSize)/double(outer+1);
      reallocRatio = (nnzEstimate-double(m_data.size()))/double(m_data.size());
      // bound the realloc ratio to avoid both many minor reallocs and over-allocation
      reallocRatio = (std::min)((std::max)(reallocRatio,1.5),8.);
    }
  }
  m_data.resize(m_data.size()+1,reallocRatio);

  if (!isLastVec)
  {
    if (previousOuter==-1)
    {
      // oops, wrong guess: let's correct the outer offsets
      for (Index k=0; k<=(outer+1); ++k)
        m_outerIndex[k] = 0;
      Index k=outer+1;
      while(m_outerIndex[k]==0)
        m_outerIndex[k++] = 1;
      while (k<=m_outerSize && m_outerIndex[k]!=0)
        m_outerIndex[k++]++;
      p = 0;
      --k;
      k = m_outerIndex[k]-1;
      while (k>0)
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
    else
    {
      // we are not inserting into the last inner vector: update the outer indices
      Index j = outer+2;
      while (j<=m_outerSize && m_outerIndex[j]!=0)
        m_outerIndex[j++]++;
      --j;
      // and shift the data of the trailing vectors
      Index k = m_outerIndex[j]-1;
      while (k>=Index(p))
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
  }

  // sorted insertion within the inner vector
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseMatrix<_Scalar,_Options,_Index> >
  : evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > >
{
  typedef evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > > Base;
  typedef SparseMatrix<_Scalar,_Options,_Index> SparseMatrixType;
  evaluator() : Base() {}
  explicit evaluator(const SparseMatrixType &mat) : Base(mat) {}
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSEMATRIX_H