    internal::LowerBoundIndex lower_bound(Index row, Index col) const
    {
      eigen_internal_assert(row>=0 && row<this->rows() && col>=0 && col<this->cols());

      const Index outer = Derived::IsRowMajor ? row : col;
      const Index inner = Derived::IsRowMajor ? col : row;

      Index start = this->outerIndexPtr()[outer];
      Index end = this->isCompressed() ? this->outerIndexPtr()[outer+1] : this->outerIndexPtr()[outer] + this->innerNonZeroPtr()[outer];
      eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
      internal::LowerBoundIndex p;
      p.value = std::lower_bound(this->innerIndexPtr()+start, this->innerIndexPtr()+end, inner) - this->innerIndexPtr();
      p.found = (p.value<end) && (this->innerIndexPtr()[p.value]==inner);
      return p;
    }
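    // Usage sketch (illustrative only, not part of this file): lower_bound performs a
    // binary search within one inner vector, so a single coefficient lookup costs
    // O(log(nnz of that inner vector)). For a column-major matrix `A`:
    //   internal::LowerBoundIndex p = A.lower_bound(i, j);
    // p.found tells whether the coefficient is actually stored; p.value is its
    // position in valuePtr()/innerIndexPtr() (or where it would be inserted).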
};

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }
    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
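    // The outer-index-free constructors (above and below) are only valid for
    // compile-time vectors, as the static asserts enforce: they sweep every stored
    // coefficient of the expression in a single pass.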
    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
    inline InnerIterator& operator++() { m_id++; return *this; }
    inline InnerIterator& operator+=(Index i) { m_id += i; return *this; }

    inline InnerIterator operator+(Index i)
    {
      InnerIterator result = *this;
      result += i;
      return result;
    }
    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }
  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
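// Typical forward traversal of a compressed sparse expression (a minimal usage
// sketch assuming a SparseMatrix<double> `A`; not part of this file):
//   double sum = 0;
//   for (Index k = 0; k < A.outerSize(); ++k)
//     for (SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
//       sum += it.value();   // it.row(), it.col(), it.index() are also available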
template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
    inline ReverseInnerIterator& operator-=(Index i) { m_id -= i; return *this; }

    inline ReverseInnerIterator operator-(Index i)
    {
      ReverseInnerIterator result = *this;
      result -= i;
      return result;
    }
    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }
  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
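// Reverse traversal mirrors the forward case (again an illustrative sketch, not
// part of this file): m_id starts one past the last stored entry of the inner
// vector, so value()/index() read position m_id-1 and the loop stops when
// m_id reaches m_start:
//   for (SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it)
//     lastToFirst.push_back(it.value());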
template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }
  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }
protected:

  Index find(Index row, Index col) const
  {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row,col);
    return p.found ? p.value : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
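// Random access through this evaluator funnels into find(), which maps (row,col)
// to its storage position or to the sentinel Dynamic when the entry is not stored:
// coeff() then returns the cached m_zero by reference, while coeffRef() asserts
// the entry exists. A minimal sketch of the intended behavior (assuming a
// SparseMatrix<double> `A`; names are illustrative only):
//   internal::evaluator<SparseMatrix<double> > eval(A);
//   double x = eval.coeff(i, j);   // 0.0 if A has no stored (i,j) entry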