// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_CWISE_BINARY_OP_H
#define EIGEN_SPARSE_CWISE_BINARY_OP_H

namespace Eigen {

// Here we have to handle 3 cases:
//  1 - sparse op dense
//  2 - dense op sparse
//  3 - sparse op sparse
// We also need to implement a 4th iterator for:
//  4 - dense op dense
// Finally, we also need to distinguish between the product and other operations :
//                configuration      returned mode
//  1 - sparse op dense    product      sparse
//                         generic      dense
//  2 - dense op sparse    product      sparse
//                         generic      dense
//  3 - sparse op sparse   product      sparse
//                         generic      sparse
//  4 - dense op dense     product      dense
//                         generic      dense
//
// TODO to ease compiler job, we could specialize product/quotient with a scalar
// and fallback to cwise-unary evaluator using bind1st_op and bind2nd_op.
35 template<typename BinaryOp, typename Lhs, typename Rhs>
36 class CwiseBinaryOpImpl<BinaryOp, Lhs, Rhs, Sparse>
37  : public SparseMatrixBase<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
38 {
39  public:
42  EIGEN_SPARSE_PUBLIC_INTERFACE(Derived)
44  {
45  EIGEN_STATIC_ASSERT((
47  typename internal::traits<Rhs>::StorageKind>::value)
48  || ((Lhs::Flags&RowMajorBit) == (Rhs::Flags&RowMajorBit))),
49  THE_STORAGE_ORDER_OF_BOTH_SIDES_MUST_MATCH);
50  }
51 };
52 
namespace internal {

56 // Generic "sparse OP sparse"
57 template<typename XprType> struct binary_sparse_evaluator;
58 
59 template<typename BinaryOp, typename Lhs, typename Rhs>
61  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
62 {
63 protected:
64  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
65  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
67  typedef typename traits<XprType>::Scalar Scalar;
68  typedef typename XprType::StorageIndex StorageIndex;
69 public:
70 
72  {
73  public:
74 
75  EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
76  : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor)
77  {
78  this->operator++();
79  }
80 
81  EIGEN_STRONG_INLINE InnerIterator& operator++()
82  {
83  if (m_lhsIter && m_rhsIter && (m_lhsIter.index() == m_rhsIter.index()))
84  {
85  m_id = m_lhsIter.index();
86  m_value = m_functor(m_lhsIter.value(), m_rhsIter.value());
87  ++m_lhsIter;
88  ++m_rhsIter;
89  }
90  else if (m_lhsIter && (!m_rhsIter || (m_lhsIter.index() < m_rhsIter.index())))
91  {
92  m_id = m_lhsIter.index();
93  m_value = m_functor(m_lhsIter.value(), Scalar(0));
94  ++m_lhsIter;
95  }
96  else if (m_rhsIter && (!m_lhsIter || (m_lhsIter.index() > m_rhsIter.index())))
97  {
98  m_id = m_rhsIter.index();
99  m_value = m_functor(Scalar(0), m_rhsIter.value());
100  ++m_rhsIter;
101  }
102  else
103  {
104  m_value = 0; // this is to avoid a compilation warning
105  m_id = -1;
106  }
107  return *this;
108  }
109 
110  EIGEN_STRONG_INLINE Scalar value() const { return m_value; }
111 
112  EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
113  EIGEN_STRONG_INLINE Index row() const { return Lhs::IsRowMajor ? m_lhsIter.row() : index(); }
114  EIGEN_STRONG_INLINE Index col() const { return Lhs::IsRowMajor ? index() : m_lhsIter.col(); }
115 
116  EIGEN_STRONG_INLINE operator bool() const { return m_id>=0; }
117 
118  protected:
119  LhsIterator m_lhsIter;
120  RhsIterator m_rhsIter;
121  const BinaryOp& m_functor;
122  Scalar m_value;
123  StorageIndex m_id;
124  };
125 
126 
127  enum {
129  Flags = XprType::Flags
130  };
131 
132  explicit binary_evaluator(const XprType& xpr)
133  : m_functor(xpr.functor()),
134  m_lhsImpl(xpr.lhs()),
135  m_rhsImpl(xpr.rhs())
136  {
137  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
138  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
139  }
140 
141  inline Index nonZerosEstimate() const {
142  return m_lhsImpl.nonZerosEstimate() + m_rhsImpl.nonZerosEstimate();
143  }
144 
145 protected:
146  const BinaryOp m_functor;
147  evaluator<Lhs> m_lhsImpl;
148  evaluator<Rhs> m_rhsImpl;
149 };
150 
151 // dense op sparse
152 template<typename BinaryOp, typename Lhs, typename Rhs>
154  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
155 {
156 protected:
157  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
159  typedef typename traits<XprType>::Scalar Scalar;
160  typedef typename XprType::StorageIndex StorageIndex;
161 public:
162 
164  {
165  enum { IsRowMajor = (int(Rhs::Flags)&RowMajorBit)==RowMajorBit };
166  public:
167 
168  EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
169  : m_lhsEval(aEval.m_lhsImpl), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor), m_value(0), m_id(-1), m_innerSize(aEval.m_expr.rhs().innerSize())
170  {
171  this->operator++();
172  }
173 
174  EIGEN_STRONG_INLINE InnerIterator& operator++()
175  {
176  ++m_id;
177  if(m_id<m_innerSize)
178  {
179  Scalar lhsVal = m_lhsEval.coeff(IsRowMajor?m_rhsIter.outer():m_id,
180  IsRowMajor?m_id:m_rhsIter.outer());
181  if(m_rhsIter && m_rhsIter.index()==m_id)
182  {
183  m_value = m_functor(lhsVal, m_rhsIter.value());
184  ++m_rhsIter;
185  }
186  else
187  m_value = m_functor(lhsVal, Scalar(0));
188  }
189 
190  return *this;
191  }
192 
193  EIGEN_STRONG_INLINE Scalar value() const { eigen_internal_assert(m_id<m_innerSize); return m_value; }
194 
195  EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
196  EIGEN_STRONG_INLINE Index row() const { return IsRowMajor ? m_rhsIter.outer() : m_id; }
197  EIGEN_STRONG_INLINE Index col() const { return IsRowMajor ? m_id : m_rhsIter.outer(); }
198 
199  EIGEN_STRONG_INLINE operator bool() const { return m_id<m_innerSize; }
200 
201  protected:
202  const evaluator<Lhs> &m_lhsEval;
203  RhsIterator m_rhsIter;
204  const BinaryOp& m_functor;
205  Scalar m_value;
206  StorageIndex m_id;
207  StorageIndex m_innerSize;
208  };
209 
210 
211  enum {
213  // Expose storage order of the sparse expression
214  Flags = (XprType::Flags & ~RowMajorBit) | (int(Rhs::Flags)&RowMajorBit)
215  };
216 
217  explicit binary_evaluator(const XprType& xpr)
218  : m_functor(xpr.functor()),
219  m_lhsImpl(xpr.lhs()),
220  m_rhsImpl(xpr.rhs()),
221  m_expr(xpr)
222  {
223  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
224  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
225  }
226 
227  inline Index nonZerosEstimate() const {
228  return m_expr.size();
229  }
230 
231 protected:
232  const BinaryOp m_functor;
233  evaluator<Lhs> m_lhsImpl;
234  evaluator<Rhs> m_rhsImpl;
235  const XprType &m_expr;
236 };
237 
238 // sparse op dense
239 template<typename BinaryOp, typename Lhs, typename Rhs>
241  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
242 {
243 protected:
244  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
246  typedef typename traits<XprType>::Scalar Scalar;
247  typedef typename XprType::StorageIndex StorageIndex;
248 public:
249 
251  {
252  enum { IsRowMajor = (int(Lhs::Flags)&RowMajorBit)==RowMajorBit };
253  public:
254 
255  EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
256  : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsEval(aEval.m_rhsImpl), m_functor(aEval.m_functor), m_value(0), m_id(-1), m_innerSize(aEval.m_expr.lhs().innerSize())
257  {
258  this->operator++();
259  }
260 
261  EIGEN_STRONG_INLINE InnerIterator& operator++()
262  {
263  ++m_id;
264  if(m_id<m_innerSize)
265  {
266  Scalar rhsVal = m_rhsEval.coeff(IsRowMajor?m_lhsIter.outer():m_id,
267  IsRowMajor?m_id:m_lhsIter.outer());
268  if(m_lhsIter && m_lhsIter.index()==m_id)
269  {
270  m_value = m_functor(m_lhsIter.value(), rhsVal);
271  ++m_lhsIter;
272  }
273  else
274  m_value = m_functor(Scalar(0),rhsVal);
275  }
276 
277  return *this;
278  }
279 
280  EIGEN_STRONG_INLINE Scalar value() const { eigen_internal_assert(m_id<m_innerSize); return m_value; }
281 
282  EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
283  EIGEN_STRONG_INLINE Index row() const { return IsRowMajor ? m_lhsIter.outer() : m_id; }
284  EIGEN_STRONG_INLINE Index col() const { return IsRowMajor ? m_id : m_lhsIter.outer(); }
285 
286  EIGEN_STRONG_INLINE operator bool() const { return m_id<m_innerSize; }
287 
288  protected:
289  LhsIterator m_lhsIter;
290  const evaluator<Rhs> &m_rhsEval;
291  const BinaryOp& m_functor;
292  Scalar m_value;
293  StorageIndex m_id;
294  StorageIndex m_innerSize;
295  };
296 
297 
298  enum {
300  // Expose storage order of the sparse expression
301  Flags = (XprType::Flags & ~RowMajorBit) | (int(Lhs::Flags)&RowMajorBit)
302  };
303 
304  explicit binary_evaluator(const XprType& xpr)
305  : m_functor(xpr.functor()),
306  m_lhsImpl(xpr.lhs()),
307  m_rhsImpl(xpr.rhs()),
308  m_expr(xpr)
309  {
310  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
311  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
312  }
313 
314  inline Index nonZerosEstimate() const {
315  return m_expr.size();
316  }
317 
318 protected:
319  const BinaryOp m_functor;
320  evaluator<Lhs> m_lhsImpl;
321  evaluator<Rhs> m_rhsImpl;
322  const XprType &m_expr;
323 };
324 
325 template<typename T,
326  typename LhsKind = typename evaluator_traits<typename T::Lhs>::Kind,
327  typename RhsKind = typename evaluator_traits<typename T::Rhs>::Kind,
328  typename LhsScalar = typename traits<typename T::Lhs>::Scalar,
329  typename RhsScalar = typename traits<typename T::Rhs>::Scalar> struct sparse_conjunction_evaluator;
330 
331 // "sparse .* sparse"
332 template<typename T1, typename T2, typename Lhs, typename Rhs>
334  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
335 {
338  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
339 };
340 // "dense .* sparse"
341 template<typename T1, typename T2, typename Lhs, typename Rhs>
343  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
344 {
347  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
348 };
349 // "sparse .* dense"
350 template<typename T1, typename T2, typename Lhs, typename Rhs>
352  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
353 {
356  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
357 };
358 
359 // "sparse && sparse"
360 template<typename Lhs, typename Rhs>
362  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_boolean_and_op, Lhs, Rhs> >
363 {
366  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
367 };
368 // "dense && sparse"
369 template<typename Lhs, typename Rhs>
371  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_boolean_and_op, Lhs, Rhs> >
372 {
375  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
376 };
377 // "sparse && dense"
378 template<typename Lhs, typename Rhs>
380  : sparse_conjunction_evaluator<CwiseBinaryOp<scalar_boolean_and_op, Lhs, Rhs> >
381 {
384  explicit binary_evaluator(const XprType& xpr) : Base(xpr) {}
385 };
386 
387 // "sparse ^ sparse"
388 template<typename XprType>
390  : evaluator_base<XprType>
391 {
392 protected:
393  typedef typename XprType::Functor BinaryOp;
394  typedef typename XprType::Lhs LhsArg;
395  typedef typename XprType::Rhs RhsArg;
396  typedef typename evaluator<LhsArg>::InnerIterator LhsIterator;
397  typedef typename evaluator<RhsArg>::InnerIterator RhsIterator;
398  typedef typename XprType::StorageIndex StorageIndex;
399  typedef typename traits<XprType>::Scalar Scalar;
400 public:
401 
403  {
404  public:
405 
406  EIGEN_STRONG_INLINE InnerIterator(const sparse_conjunction_evaluator& aEval, Index outer)
407  : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor)
408  {
409  while (m_lhsIter && m_rhsIter && (m_lhsIter.index() != m_rhsIter.index()))
410  {
411  if (m_lhsIter.index() < m_rhsIter.index())
412  ++m_lhsIter;
413  else
414  ++m_rhsIter;
415  }
416  }
417 
418  EIGEN_STRONG_INLINE InnerIterator& operator++()
419  {
420  ++m_lhsIter;
421  ++m_rhsIter;
422  while (m_lhsIter && m_rhsIter && (m_lhsIter.index() != m_rhsIter.index()))
423  {
424  if (m_lhsIter.index() < m_rhsIter.index())
425  ++m_lhsIter;
426  else
427  ++m_rhsIter;
428  }
429  return *this;
430  }
431 
432  EIGEN_STRONG_INLINE Scalar value() const { return m_functor(m_lhsIter.value(), m_rhsIter.value()); }
433 
434  EIGEN_STRONG_INLINE StorageIndex index() const { return m_lhsIter.index(); }
435  EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); }
436  EIGEN_STRONG_INLINE Index col() const { return m_lhsIter.col(); }
437 
438  EIGEN_STRONG_INLINE operator bool() const { return (m_lhsIter && m_rhsIter); }
439 
440  protected:
441  LhsIterator m_lhsIter;
442  RhsIterator m_rhsIter;
443  const BinaryOp& m_functor;
444  };
445 
446 
447  enum {
449  Flags = XprType::Flags
450  };
451 
452  explicit sparse_conjunction_evaluator(const XprType& xpr)
453  : m_functor(xpr.functor()),
454  m_lhsImpl(xpr.lhs()),
455  m_rhsImpl(xpr.rhs())
456  {
457  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
458  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
459  }
460 
461  inline Index nonZerosEstimate() const {
462  return (std::min)(m_lhsImpl.nonZerosEstimate(), m_rhsImpl.nonZerosEstimate());
463  }
464 
465 protected:
466  const BinaryOp m_functor;
467  evaluator<LhsArg> m_lhsImpl;
468  evaluator<RhsArg> m_rhsImpl;
469 };
470 
471 // "dense ^ sparse"
472 template<typename XprType>
474  : evaluator_base<XprType>
475 {
476 protected:
477  typedef typename XprType::Functor BinaryOp;
478  typedef typename XprType::Lhs LhsArg;
479  typedef typename XprType::Rhs RhsArg;
481  typedef typename evaluator<RhsArg>::InnerIterator RhsIterator;
482  typedef typename XprType::StorageIndex StorageIndex;
483  typedef typename traits<XprType>::Scalar Scalar;
484 public:
485 
487  {
488  enum { IsRowMajor = (int(RhsArg::Flags)&RowMajorBit)==RowMajorBit };
489 
490  public:
491 
492  EIGEN_STRONG_INLINE InnerIterator(const sparse_conjunction_evaluator& aEval, Index outer)
493  : m_lhsEval(aEval.m_lhsImpl), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor), m_outer(outer)
494  {}
495 
496  EIGEN_STRONG_INLINE InnerIterator& operator++()
497  {
498  ++m_rhsIter;
499  return *this;
500  }
501 
502  EIGEN_STRONG_INLINE Scalar value() const
503  { return m_functor(m_lhsEval.coeff(IsRowMajor?m_outer:m_rhsIter.index(),IsRowMajor?m_rhsIter.index():m_outer), m_rhsIter.value()); }
504 
505  EIGEN_STRONG_INLINE StorageIndex index() const { return m_rhsIter.index(); }
506  EIGEN_STRONG_INLINE Index row() const { return m_rhsIter.row(); }
507  EIGEN_STRONG_INLINE Index col() const { return m_rhsIter.col(); }
508 
509  EIGEN_STRONG_INLINE operator bool() const { return m_rhsIter; }
510 
511  protected:
512  const LhsEvaluator &m_lhsEval;
513  RhsIterator m_rhsIter;
514  const BinaryOp& m_functor;
515  const Index m_outer;
516  };
517 
518 
519  enum {
521  // Expose storage order of the sparse expression
522  Flags = (XprType::Flags & ~RowMajorBit) | (int(RhsArg::Flags)&RowMajorBit)
523  };
524 
525  explicit sparse_conjunction_evaluator(const XprType& xpr)
526  : m_functor(xpr.functor()),
527  m_lhsImpl(xpr.lhs()),
528  m_rhsImpl(xpr.rhs())
529  {
530  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
531  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
532  }
533 
534  inline Index nonZerosEstimate() const {
535  return m_rhsImpl.nonZerosEstimate();
536  }
537 
538 protected:
539  const BinaryOp m_functor;
540  evaluator<LhsArg> m_lhsImpl;
541  evaluator<RhsArg> m_rhsImpl;
542 };
543 
544 // "sparse ^ dense"
545 template<typename XprType>
547  : evaluator_base<XprType>
548 {
549 protected:
550  typedef typename XprType::Functor BinaryOp;
551  typedef typename XprType::Lhs LhsArg;
552  typedef typename XprType::Rhs RhsArg;
553  typedef typename evaluator<LhsArg>::InnerIterator LhsIterator;
555  typedef typename XprType::StorageIndex StorageIndex;
556  typedef typename traits<XprType>::Scalar Scalar;
557 public:
558 
560  {
561  enum { IsRowMajor = (int(LhsArg::Flags)&RowMajorBit)==RowMajorBit };
562 
563  public:
564 
565  EIGEN_STRONG_INLINE InnerIterator(const sparse_conjunction_evaluator& aEval, Index outer)
566  : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsEval(aEval.m_rhsImpl), m_functor(aEval.m_functor), m_outer(outer)
567  {}
568 
569  EIGEN_STRONG_INLINE InnerIterator& operator++()
570  {
571  ++m_lhsIter;
572  return *this;
573  }
574 
575  EIGEN_STRONG_INLINE Scalar value() const
576  { return m_functor(m_lhsIter.value(),
577  m_rhsEval.coeff(IsRowMajor?m_outer:m_lhsIter.index(),IsRowMajor?m_lhsIter.index():m_outer)); }
578 
579  EIGEN_STRONG_INLINE StorageIndex index() const { return m_lhsIter.index(); }
580  EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); }
581  EIGEN_STRONG_INLINE Index col() const { return m_lhsIter.col(); }
582 
583  EIGEN_STRONG_INLINE operator bool() const { return m_lhsIter; }
584 
585  protected:
586  LhsIterator m_lhsIter;
587  const evaluator<RhsArg> &m_rhsEval;
588  const BinaryOp& m_functor;
589  const Index m_outer;
590  };
591 
592 
593  enum {
595  // Expose storage order of the sparse expression
596  Flags = (XprType::Flags & ~RowMajorBit) | (int(LhsArg::Flags)&RowMajorBit)
597  };
598 
599  explicit sparse_conjunction_evaluator(const XprType& xpr)
600  : m_functor(xpr.functor()),
601  m_lhsImpl(xpr.lhs()),
602  m_rhsImpl(xpr.rhs())
603  {
604  EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
605  EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
606  }
607 
608  inline Index nonZerosEstimate() const {
609  return m_lhsImpl.nonZerosEstimate();
610  }
611 
612 protected:
613  const BinaryOp m_functor;
614  evaluator<LhsArg> m_lhsImpl;
615  evaluator<RhsArg> m_rhsImpl;
616 };
617 
} // end namespace internal

/***************************************************************************
* Implementation of SparseMatrixBase and SparseCwise functions/operators
***************************************************************************/

624 template<typename Derived>
625 template<typename OtherDerived>
626 EIGEN_STRONG_INLINE Derived &
628 {
629  return derived() = derived() - other.derived();
630 }
631 
632 template<typename Derived>
633 template<typename OtherDerived>
634 EIGEN_STRONG_INLINE Derived &
636 {
637  return derived() = derived() + other.derived();
638 }
639 
640 template<typename Derived>
641 template<typename OtherDerived>
643 {
644  call_assignment_no_alias(derived(), other.derived(), internal::add_assign_op<Scalar,typename OtherDerived::Scalar>());
645  return derived();
646 }
647 
648 template<typename Derived>
649 template<typename OtherDerived>
651 {
652  call_assignment_no_alias(derived(), other.derived(), internal::sub_assign_op<Scalar,typename OtherDerived::Scalar>());
653  return derived();
654 }
655 
656 template<typename Derived>
657 template<typename OtherDerived>
658 EIGEN_STRONG_INLINE const typename SparseMatrixBase<Derived>::template CwiseProductDenseReturnType<OtherDerived>::Type
660 {
661  return typename CwiseProductDenseReturnType<OtherDerived>::Type(derived(), other.derived());
662 }
663 
664 template<typename DenseDerived, typename SparseDerived>
665 EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_sum_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>
666 operator+(const MatrixBase<DenseDerived> &a, const SparseMatrixBase<SparseDerived> &b)
667 {
668  return CwiseBinaryOp<internal::scalar_sum_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>(a.derived(), b.derived());
669 }
670 
671 template<typename SparseDerived, typename DenseDerived>
672 EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_sum_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>
673 operator+(const SparseMatrixBase<SparseDerived> &a, const MatrixBase<DenseDerived> &b)
674 {
675  return CwiseBinaryOp<internal::scalar_sum_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>(a.derived(), b.derived());
676 }
677 
678 template<typename DenseDerived, typename SparseDerived>
679 EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_difference_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>
680 operator-(const MatrixBase<DenseDerived> &a, const SparseMatrixBase<SparseDerived> &b)
681 {
682  return CwiseBinaryOp<internal::scalar_difference_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>(a.derived(), b.derived());
683 }
684 
685 template<typename SparseDerived, typename DenseDerived>
686 EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_difference_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>
687 operator-(const SparseMatrixBase<SparseDerived> &a, const MatrixBase<DenseDerived> &b)
688 {
689  return CwiseBinaryOp<internal::scalar_difference_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>(a.derived(), b.derived());
690 }
691 
} // end namespace Eigen

#endif // EIGEN_SPARSE_CWISE_BINARY_OP_H