SparseCwiseBinaryOp.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_CWISE_BINARY_OP_H
#define EIGEN_SPARSE_CWISE_BINARY_OP_H

namespace Eigen {

// Here we have to handle 3 cases:
//  1 - sparse op dense
//  2 - dense op sparse
//  3 - sparse op sparse
// We also need to implement a 4th iterator for:
//  4 - dense op dense
// Finally, we also need to distinguish between the product and other operations:
//                           configuration   returned mode
//  1 - sparse op dense      product         sparse
//                           generic         dense
//  2 - dense op sparse      product         sparse
//                           generic         dense
//  3 - sparse op sparse     product         sparse
//                           generic         sparse
//  4 - dense op dense       product         dense
//                           generic         dense
//
// TODO: to ease the compiler's job, we could specialize product/quotient with a scalar
// and fall back to a cwise-unary evaluator using bind1st_op and bind2nd_op.
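//
// Editor's illustration (not part of the original file): a rough sketch of how
// user-level expressions map onto the cases above, assuming
// SparseMatrix<double> A, B and MatrixXd D:
//
//   A + B               // case 3, generic:  sparse op sparse -> sparse result
//   D + A               // case 2, generic:  dense op sparse  -> dense result
//   A.cwiseProduct(B)   // case 3, product:  sparse .* sparse -> sparse result
//   A.cwiseProduct(D)   // case 1, product:  sparse .* dense  -> sparse result
//
// Case 4 (dense op dense) is handled by the dense evaluators and is listed
// above only for completeness.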

template<typename BinaryOp, typename Lhs, typename Rhs>
class CwiseBinaryOpImpl<BinaryOp, Lhs, Rhs, Sparse>
  : public SparseMatrixBase<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
{
  public:
    typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> Derived;
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(Derived)
    CwiseBinaryOpImpl()
    {
      EIGEN_STATIC_ASSERT((
                (!internal::is_same<typename internal::traits<Lhs>::StorageKind,
                                    typename internal::traits<Rhs>::StorageKind>::value)
            ||  ((Lhs::Flags&RowMajorBit) == (Rhs::Flags&RowMajorBit))),
            THE_STORAGE_ORDER_OF_BOTH_SIDES_MUST_MATCH);
    }
};
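
// Editor's note (illustrative sketch, not part of the original file): the
// static assertion above rejects mixing storage orders of two sparse operands
// at compile time, e.g.
//
//   SparseMatrix<double, ColMajor> A;
//   SparseMatrix<double, RowMajor> B;
//   // A + B   -> error: THE_STORAGE_ORDER_OF_BOTH_SIDES_MUST_MATCH
//
// Mixed sparse/dense expressions are exempt because the first operand of the
// logical-or above is already true when the two StorageKinds differ.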

namespace internal {


// Generic "sparse OP sparse"
template<typename XprType> struct binary_sparse_evaluator;

template<typename BinaryOp, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<BinaryOp, Lhs, Rhs>, IteratorBased, IteratorBased>
  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
{
protected:
  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename traits<XprType>::Scalar Scalar;
  typedef typename XprType::StorageIndex StorageIndex;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor)
    {
      this->operator++();
    }

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      if (m_lhsIter && m_rhsIter && (m_lhsIter.index() == m_rhsIter.index()))
      {
        m_id = m_lhsIter.index();
        m_value = m_functor(m_lhsIter.value(), m_rhsIter.value());
        ++m_lhsIter;
        ++m_rhsIter;
      }
      else if (m_lhsIter && (!m_rhsIter || (m_lhsIter.index() < m_rhsIter.index())))
      {
        m_id = m_lhsIter.index();
        m_value = m_functor(m_lhsIter.value(), Scalar(0));
        ++m_lhsIter;
      }
      else if (m_rhsIter && (!m_lhsIter || (m_lhsIter.index() > m_rhsIter.index())))
      {
        m_id = m_rhsIter.index();
        m_value = m_functor(Scalar(0), m_rhsIter.value());
        ++m_rhsIter;
      }
      else
      {
        m_value = 0; // this is to avoid a compilation warning
        m_id = -1;
      }
      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const { return m_value; }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
    EIGEN_STRONG_INLINE Index row() const { return Lhs::IsRowMajor ? m_lhsIter.row() : index(); }
    EIGEN_STRONG_INLINE Index col() const { return Lhs::IsRowMajor ? index() : m_lhsIter.col(); }

    EIGEN_STRONG_INLINE operator bool() const { return m_id>=0; }

  protected:
    LhsIterator m_lhsIter;
    RhsIterator m_rhsIter;
    const BinaryOp& m_functor;
    Scalar m_value;
    StorageIndex m_id;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    Flags = XprType::Flags
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs())
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_lhsImpl.nonZerosEstimate() + m_rhsImpl.nonZerosEstimate();
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
};
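
// Editor's note (illustrative sketch, not part of the original file): the
// InnerIterator above performs a sorted two-way merge over one inner vector,
// visiting the *union* of the lhs and rhs nonzero index sets and passing an
// explicit Scalar(0) for the side that has no entry; nonZerosEstimate() is
// correspondingly the sum of both estimates. At the user level:
//
//   SparseMatrix<double> A(4,4), B(4,4);
//   A.insert(0,0) = 1;  A.insert(2,0) = 2;
//   B.insert(1,0) = 3;  B.insert(2,0) = 4;
//   SparseMatrix<double> C = A + B;   // column 0 holds (0,1), (1,3), (2,6):
//                                     // the union of both nonzero patterns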

// dense op sparse
template<typename BinaryOp, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<BinaryOp, Lhs, Rhs>, IndexBased, IteratorBased>
  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
{
protected:
  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename traits<XprType>::Scalar Scalar;
  typedef typename XprType::StorageIndex StorageIndex;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
    enum { IsRowMajor = (int(Rhs::Flags)&RowMajorBit)==RowMajorBit };
  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsEval(aEval.m_lhsImpl), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor), m_value(0), m_id(-1), m_innerSize(aEval.m_expr.rhs().innerSize())
    {
      this->operator++();
    }

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      ++m_id;
      if(m_id<m_innerSize)
      {
        Scalar lhsVal = m_lhsEval.coeff(IsRowMajor?m_rhsIter.outer():m_id,
                                        IsRowMajor?m_id:m_rhsIter.outer());
        if(m_rhsIter && m_rhsIter.index()==m_id)
        {
          m_value = m_functor(lhsVal, m_rhsIter.value());
          ++m_rhsIter;
        }
        else
          m_value = m_functor(lhsVal, Scalar(0));
      }

      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const { eigen_internal_assert(m_id<m_innerSize); return m_value; }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
    EIGEN_STRONG_INLINE Index row() const { return IsRowMajor ? m_rhsIter.outer() : m_id; }
    EIGEN_STRONG_INLINE Index col() const { return IsRowMajor ? m_id : m_rhsIter.outer(); }

    EIGEN_STRONG_INLINE operator bool() const { return m_id<m_innerSize; }

  protected:
    const evaluator<Lhs> &m_lhsEval;
    RhsIterator m_rhsIter;
    const BinaryOp& m_functor;
    Scalar m_value;
    StorageIndex m_id;
    StorageIndex m_innerSize;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    // Expose storage order of the sparse expression
    Flags = (XprType::Flags & ~RowMajorBit) | (int(Rhs::Flags)&RowMajorBit)
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs()),
      m_expr(xpr)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_expr.size();
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
  const XprType &m_expr;
};
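
// Editor's note (illustrative sketch, not part of the original file): unlike
// the sparse/sparse case, this iterator (and its sparse-op-dense mirror below)
// advances m_id through every inner index, fetching the dense coefficient each
// time, which is why nonZerosEstimate() returns the full expression size.
// At the user level, using the mixed operators defined at the end of this
// file:
//
//   MatrixXd D(3,3);  SparseMatrix<double> S(3,3);
//   MatrixXd R = D + S;   // every coefficient of the result is computed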

// sparse op dense
template<typename BinaryOp, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<BinaryOp, Lhs, Rhs>, IteratorBased, IndexBased>
  : evaluator_base<CwiseBinaryOp<BinaryOp, Lhs, Rhs> >
{
protected:
  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename traits<XprType>::Scalar Scalar;
  typedef typename XprType::StorageIndex StorageIndex;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
    enum { IsRowMajor = (int(Lhs::Flags)&RowMajorBit)==RowMajorBit };
  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsEval(aEval.m_rhsImpl), m_functor(aEval.m_functor), m_value(0), m_id(-1), m_innerSize(aEval.m_expr.lhs().innerSize())
    {
      this->operator++();
    }

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      ++m_id;
      if(m_id<m_innerSize)
      {
        Scalar rhsVal = m_rhsEval.coeff(IsRowMajor?m_lhsIter.outer():m_id,
                                        IsRowMajor?m_id:m_lhsIter.outer());
        if(m_lhsIter && m_lhsIter.index()==m_id)
        {
          m_value = m_functor(m_lhsIter.value(), rhsVal);
          ++m_lhsIter;
        }
        else
          m_value = m_functor(Scalar(0),rhsVal);
      }

      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const { eigen_internal_assert(m_id<m_innerSize); return m_value; }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_id; }
    EIGEN_STRONG_INLINE Index row() const { return IsRowMajor ? m_lhsIter.outer() : m_id; }
    EIGEN_STRONG_INLINE Index col() const { return IsRowMajor ? m_id : m_lhsIter.outer(); }

    EIGEN_STRONG_INLINE operator bool() const { return m_id<m_innerSize; }

  protected:
    LhsIterator m_lhsIter;
    const evaluator<Rhs> &m_rhsEval;
    const BinaryOp& m_functor;
    Scalar m_value;
    StorageIndex m_id;
    StorageIndex m_innerSize;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    // Expose storage order of the sparse expression
    Flags = (XprType::Flags & ~RowMajorBit) | (int(Lhs::Flags)&RowMajorBit)
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs()),
      m_expr(xpr)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_expr.size();
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
  const XprType &m_expr;
};

// "sparse .* sparse"
template<typename T1, typename T2, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs>, IteratorBased, IteratorBased>
  : evaluator_base<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
{
protected:
  typedef scalar_product_op<T1,T2> BinaryOp;
  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename traits<XprType>::Scalar Scalar;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor)
    {
      while (m_lhsIter && m_rhsIter && (m_lhsIter.index() != m_rhsIter.index()))
      {
        if (m_lhsIter.index() < m_rhsIter.index())
          ++m_lhsIter;
        else
          ++m_rhsIter;
      }
    }

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      ++m_lhsIter;
      ++m_rhsIter;
      while (m_lhsIter && m_rhsIter && (m_lhsIter.index() != m_rhsIter.index()))
      {
        if (m_lhsIter.index() < m_rhsIter.index())
          ++m_lhsIter;
        else
          ++m_rhsIter;
      }
      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const { return m_functor(m_lhsIter.value(), m_rhsIter.value()); }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_lhsIter.index(); }
    EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); }
    EIGEN_STRONG_INLINE Index col() const { return m_lhsIter.col(); }

    EIGEN_STRONG_INLINE operator bool() const { return (m_lhsIter && m_rhsIter); }

  protected:
    LhsIterator m_lhsIter;
    RhsIterator m_rhsIter;
    const BinaryOp& m_functor;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    Flags = XprType::Flags
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs())
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return (std::min)(m_lhsImpl.nonZerosEstimate(), m_rhsImpl.nonZerosEstimate());
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
};
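
// Editor's note (illustrative sketch, not part of the original file): for the
// coefficient-wise product the iterators are advanced until their indices
// agree, so only the *intersection* of the two nonzero patterns is visited and
// nonZerosEstimate() is the smaller of the two estimates. At the user level:
//
//   SparseMatrix<double> A(4,4), B(4,4);
//   A.insert(0,0) = 2;  A.insert(2,0) = 5;
//   B.insert(2,0) = 3;  B.insert(3,0) = 7;
//   SparseMatrix<double> C = A.cwiseProduct(B);   // single entry: C(2,0) == 15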

// "dense .* sparse"
template<typename T1, typename T2, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs>, IndexBased, IteratorBased>
  : evaluator_base<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
{
protected:
  typedef scalar_product_op<T1,T2> BinaryOp;
  typedef evaluator<Lhs> LhsEvaluator;
  typedef typename evaluator<Rhs>::InnerIterator RhsIterator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename traits<XprType>::Scalar Scalar;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
    enum { IsRowMajor = (int(Rhs::Flags)&RowMajorBit)==RowMajorBit };

  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsEval(aEval.m_lhsImpl), m_rhsIter(aEval.m_rhsImpl,outer), m_functor(aEval.m_functor), m_outer(outer)
    {}

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      ++m_rhsIter;
      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const
    { return m_functor(m_lhsEval.coeff(IsRowMajor?m_outer:m_rhsIter.index(),IsRowMajor?m_rhsIter.index():m_outer), m_rhsIter.value()); }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_rhsIter.index(); }
    EIGEN_STRONG_INLINE Index row() const { return m_rhsIter.row(); }
    EIGEN_STRONG_INLINE Index col() const { return m_rhsIter.col(); }

    EIGEN_STRONG_INLINE operator bool() const { return m_rhsIter; }

  protected:
    const LhsEvaluator &m_lhsEval;
    RhsIterator m_rhsIter;
    const BinaryOp& m_functor;
    const Index m_outer;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    // Expose storage order of the sparse expression
    Flags = (XprType::Flags & ~RowMajorBit) | (int(Rhs::Flags)&RowMajorBit)
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs())
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_rhsImpl.nonZerosEstimate();
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
};

// "sparse .* dense"
template<typename T1, typename T2, typename Lhs, typename Rhs>
struct binary_evaluator<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs>, IteratorBased, IndexBased>
  : evaluator_base<CwiseBinaryOp<scalar_product_op<T1,T2>, Lhs, Rhs> >
{
protected:
  typedef scalar_product_op<T1,T2> BinaryOp;
  typedef typename evaluator<Lhs>::InnerIterator LhsIterator;
  typedef evaluator<Rhs> RhsEvaluator;
  typedef CwiseBinaryOp<BinaryOp, Lhs, Rhs> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename traits<XprType>::Scalar Scalar;
public:

  class ReverseInnerIterator;
  class InnerIterator
  {
    enum { IsRowMajor = (int(Lhs::Flags)&RowMajorBit)==RowMajorBit };

  public:

    EIGEN_STRONG_INLINE InnerIterator(const binary_evaluator& aEval, Index outer)
      : m_lhsIter(aEval.m_lhsImpl,outer), m_rhsEval(aEval.m_rhsImpl), m_functor(aEval.m_functor), m_outer(outer)
    {}

    EIGEN_STRONG_INLINE InnerIterator& operator++()
    {
      ++m_lhsIter;
      return *this;
    }

    EIGEN_STRONG_INLINE Scalar value() const
    { return m_functor(m_lhsIter.value(),
                       m_rhsEval.coeff(IsRowMajor?m_outer:m_lhsIter.index(),IsRowMajor?m_lhsIter.index():m_outer)); }

    EIGEN_STRONG_INLINE StorageIndex index() const { return m_lhsIter.index(); }
    EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); }
    EIGEN_STRONG_INLINE Index col() const { return m_lhsIter.col(); }

    EIGEN_STRONG_INLINE operator bool() const { return m_lhsIter; }

  protected:
    LhsIterator m_lhsIter;
    const evaluator<Rhs> &m_rhsEval;
    const BinaryOp& m_functor;
    const Index m_outer;
  };


  enum {
    CoeffReadCost = evaluator<Lhs>::CoeffReadCost + evaluator<Rhs>::CoeffReadCost + functor_traits<BinaryOp>::Cost,
    // Expose storage order of the sparse expression
    Flags = (XprType::Flags & ~RowMajorBit) | (int(Lhs::Flags)&RowMajorBit)
  };

  explicit binary_evaluator(const XprType& xpr)
    : m_functor(xpr.functor()),
      m_lhsImpl(xpr.lhs()),
      m_rhsImpl(xpr.rhs())
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(functor_traits<BinaryOp>::Cost);
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_lhsImpl.nonZerosEstimate();
  }

protected:
  const BinaryOp m_functor;
  evaluator<Lhs> m_lhsImpl;
  evaluator<Rhs> m_rhsImpl;
};
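
// Editor's note (illustrative sketch, not part of the original file): the two
// mixed product specializations above preserve the sparse operand's pattern;
// the iterator wraps the sparse inner iterator and fetches the matching dense
// coefficient on demand, so the result stays sparse. Using the cwiseProduct
// overload defined further below:
//
//   SparseMatrix<double> S(3,3);  S.insert(1,2) = 5;
//   MatrixXd D = MatrixXd::Constant(3,3, 2.0);
//   SparseMatrix<double> P = S.cwiseProduct(D);   // single entry: P(1,2) == 10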

}

/***************************************************************************
* Implementation of SparseMatrixBase and SparseCwise functions/operators
***************************************************************************/

template<typename Derived>
template<typename OtherDerived>
EIGEN_STRONG_INLINE Derived &
SparseMatrixBase<Derived>::operator-=(const SparseMatrixBase<OtherDerived> &other)
{
  return derived() = derived() - other.derived();
}

template<typename Derived>
template<typename OtherDerived>
EIGEN_STRONG_INLINE Derived &
SparseMatrixBase<Derived>::operator+=(const SparseMatrixBase<OtherDerived>& other)
{
  return derived() = derived() + other.derived();
}

template<typename Derived>
template<typename OtherDerived>
Derived& SparseMatrixBase<Derived>::operator+=(const DiagonalBase<OtherDerived>& other)
{
  call_assignment_no_alias(derived(), other.derived(), internal::add_assign_op<Scalar,typename OtherDerived::Scalar>());
  return derived();
}

template<typename Derived>
template<typename OtherDerived>
Derived& SparseMatrixBase<Derived>::operator-=(const DiagonalBase<OtherDerived>& other)
{
  call_assignment_no_alias(derived(), other.derived(), internal::sub_assign_op<Scalar,typename OtherDerived::Scalar>());
  return derived();
}

template<typename Derived>
template<typename OtherDerived>
EIGEN_STRONG_INLINE const typename SparseMatrixBase<Derived>::template CwiseProductDenseReturnType<OtherDerived>::Type
SparseMatrixBase<Derived>::cwiseProduct(const MatrixBase<OtherDerived> &other) const
{
  return typename CwiseProductDenseReturnType<OtherDerived>::Type(derived(), other.derived());
}

template<typename DenseDerived, typename SparseDerived>
EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_sum_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>
operator+(const MatrixBase<DenseDerived> &a, const SparseMatrixBase<SparseDerived> &b)
{
  return CwiseBinaryOp<internal::scalar_sum_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>(a.derived(), b.derived());
}

template<typename SparseDerived, typename DenseDerived>
EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_sum_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>
operator+(const SparseMatrixBase<SparseDerived> &a, const MatrixBase<DenseDerived> &b)
{
  return CwiseBinaryOp<internal::scalar_sum_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>(a.derived(), b.derived());
}

template<typename DenseDerived, typename SparseDerived>
EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_difference_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>
operator-(const MatrixBase<DenseDerived> &a, const SparseMatrixBase<SparseDerived> &b)
{
  return CwiseBinaryOp<internal::scalar_difference_op<typename DenseDerived::Scalar,typename SparseDerived::Scalar>, const DenseDerived, const SparseDerived>(a.derived(), b.derived());
}

template<typename SparseDerived, typename DenseDerived>
EIGEN_STRONG_INLINE const CwiseBinaryOp<internal::scalar_difference_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>
operator-(const SparseMatrixBase<SparseDerived> &a, const MatrixBase<DenseDerived> &b)
{
  return CwiseBinaryOp<internal::scalar_difference_op<typename SparseDerived::Scalar,typename DenseDerived::Scalar>, const SparseDerived, const DenseDerived>(a.derived(), b.derived());
}
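
// Editor's illustration (not part of the original file): the free operators
// above let dense and sparse operands be mixed directly; the resulting
// expressions are evaluated through the mixed dense/sparse evaluators defined
// earlier in this file and are typically assigned to a dense result, e.g.
//
//   SparseMatrix<double> S(100,100);
//   MatrixXd D = MatrixXd::Random(100,100);
//   MatrixXd R1 = D + S;   // dense op sparse
//   MatrixXd R2 = S - D;   // sparse op dense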

} // end namespace Eigen

#endif // EIGEN_SPARSE_CWISE_BINARY_OP_H