AmbiVector.h
Go to the documentation of this file.
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// Eigen is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 3 of the License, or (at your option) any later version.
//
// Alternatively, you can redistribute it and/or
// modify it under the terms of the GNU General Public License as
// published by the Free Software Foundation; either version 2 of
// the License, or (at your option) any later version.
//
// Eigen is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License or the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License and a copy of the GNU General Public License along with
// Eigen. If not, see <http://www.gnu.org/licenses/>.

#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

namespace Eigen {

namespace internal {
00031 
00037 template<typename _Scalar, typename _Index>
00038 class AmbiVector
00039 {
00040   public:
00041     typedef _Scalar Scalar;
00042     typedef _Index Index;
00043     typedef typename NumTraits<Scalar>::Real RealScalar;
00044 
00045     AmbiVector(Index size)
00046       : m_buffer(0), m_zero(0), m_size(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
00047     {
00048       resize(size);
00049     }
00050 
00051     void init(double estimatedDensity);
00052     void init(int mode);
00053 
00054     Index nonZeros() const;
00055 
00057     void setBounds(Index start, Index end) { m_start = start; m_end = end; }
00058 
00059     void setZero();
00060 
00061     void restart();
00062     Scalar& coeffRef(Index i);
00063     Scalar& coeff(Index i);
00064 
00065     class Iterator;
00066 
00067     ~AmbiVector() { delete[] m_buffer; }
00068 
00069     void resize(Index size)
00070     {
00071       if (m_allocatedSize < size)
00072         reallocate(size);
00073       m_size = size;
00074     }
00075 
00076     Index size() const { return m_size; }
00077 
00078   protected:
00079 
00080     void reallocate(Index size)
00081     {
00082       // if the size of the matrix is not too large, let's allocate a bit more than needed such
00083       // that we can handle dense vector even in sparse mode.
00084       delete[] m_buffer;
00085       if (size<1000)
00086       {
00087         Index allocSize = (size * sizeof(ListEl))/sizeof(Scalar);
00088         m_allocatedElements = (allocSize*sizeof(Scalar))/sizeof(ListEl);
00089         m_buffer = new Scalar[allocSize];
00090       }
00091       else
00092       {
00093         m_allocatedElements = (size*sizeof(Scalar))/sizeof(ListEl);
00094         m_buffer = new Scalar[size];
00095       }
00096       m_size = size;
00097       m_start = 0;
00098       m_end = m_size;
00099     }
00100 
00101     void reallocateSparse()
00102     {
00103       Index copyElements = m_allocatedElements;
00104       m_allocatedElements = (std::min)(Index(m_allocatedElements*1.5),m_size);
00105       Index allocSize = m_allocatedElements * sizeof(ListEl);
00106       allocSize = allocSize/sizeof(Scalar) + (allocSize%sizeof(Scalar)>0?1:0);
00107       Scalar* newBuffer = new Scalar[allocSize];
00108       memcpy(newBuffer,  m_buffer,  copyElements * sizeof(ListEl));
00109       delete[] m_buffer;
00110       m_buffer = newBuffer;
00111     }
00112 
00113   protected:
00114     // element type of the linked list
00115     struct ListEl
00116     {
00117       Index next;
00118       Index index;
00119       Scalar value;
00120     };
00121 
00122     // used to store data in both mode
00123     Scalar* m_buffer;
00124     Scalar m_zero;
00125     Index m_size;
00126     Index m_start;
00127     Index m_end;
00128     Index m_allocatedSize;
00129     Index m_allocatedElements;
00130     Index m_mode;
00131 
00132     // linked list mode
00133     Index m_llStart;
00134     Index m_llCurrent;
00135     Index m_llSize;
00136 };
00137 
00139 template<typename _Scalar,typename _Index>
00140 _Index AmbiVector<_Scalar,_Index>::nonZeros() const
00141 {
00142   if (m_mode==IsSparse)
00143     return m_llSize;
00144   else
00145     return m_end - m_start;
00146 }
00147 
00148 template<typename _Scalar,typename _Index>
00149 void AmbiVector<_Scalar,_Index>::init(double estimatedDensity)
00150 {
00151   if (estimatedDensity>0.1)
00152     init(IsDense);
00153   else
00154     init(IsSparse);
00155 }
00156 
00157 template<typename _Scalar,typename _Index>
00158 void AmbiVector<_Scalar,_Index>::init(int mode)
00159 {
00160   m_mode = mode;
00161   if (m_mode==IsSparse)
00162   {
00163     m_llSize = 0;
00164     m_llStart = -1;
00165   }
00166 }
00167 
00173 template<typename _Scalar,typename _Index>
00174 void AmbiVector<_Scalar,_Index>::restart()
00175 {
00176   m_llCurrent = m_llStart;
00177 }
00178 
00180 template<typename _Scalar,typename _Index>
00181 void AmbiVector<_Scalar,_Index>::setZero()
00182 {
00183   if (m_mode==IsDense)
00184   {
00185     for (Index i=m_start; i<m_end; ++i)
00186       m_buffer[i] = Scalar(0);
00187   }
00188   else
00189   {
00190     eigen_assert(m_mode==IsSparse);
00191     m_llSize = 0;
00192     m_llStart = -1;
00193   }
00194 }
00195 
/** \returns a writable reference to the coefficient at index \a i, inserting a
  * zero-initialized entry if it is not stored yet (sparse mode).
  *
  * Sparse-mode writes are expected in non-decreasing index order between two
  * restart() calls: the search starts from the cursor m_llCurrent rather than
  * from the head of the list (see the assertion below).
  */
template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeffRef(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    // reinterpret the Scalar buffer as an array of linked-list elements
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    // TODO factorize the following code to reduce code generation
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the first element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = i;
      llElements[0].next = -1; // -1 terminates the list
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // this is going to be the new first element of the list
      // (new elements are always appended at slot m_llSize and linked in place)
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = i;
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      // advance the cursor until m_llCurrent is the last element with index <= i
      Index nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists and we found it !
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          // the buffer may move: recompute the typed view afterwards
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
        // let's insert a new coefficient right after m_llCurrent
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = i;
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
00264 
00265 template<typename _Scalar,typename _Index>
00266 _Scalar& AmbiVector<_Scalar,_Index>::coeff(_Index i)
00267 {
00268   if (m_mode==IsDense)
00269     return m_buffer[i];
00270   else
00271   {
00272     ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
00273     eigen_assert(m_mode==IsSparse);
00274     if ((m_llSize==0) || (i<llElements[m_llStart].index))
00275     {
00276       return m_zero;
00277     }
00278     else
00279     {
00280       Index elid = m_llStart;
00281       while (elid >= 0 && llElements[elid].index<i)
00282         elid = llElements[elid].next;
00283 
00284       if (llElements[elid].index==i)
00285         return llElements[m_llCurrent].value;
00286       else
00287         return m_zero;
00288     }
00289   }
00290 }
00291 
00293 template<typename _Scalar,typename _Index>
00294 class AmbiVector<_Scalar,_Index>::Iterator
00295 {
00296   public:
00297     typedef _Scalar Scalar;
00298     typedef typename NumTraits<Scalar>::Real RealScalar;
00299 
00306     Iterator(const AmbiVector& vec, RealScalar epsilon = 0)
00307       : m_vector(vec)
00308     {
00309       m_epsilon = epsilon;
00310       m_isDense = m_vector.m_mode==IsDense;
00311       if (m_isDense)
00312       {
00313         m_currentEl = 0;   // this is to avoid a compilation warning
00314         m_cachedValue = 0; // this is to avoid a compilation warning
00315         m_cachedIndex = m_vector.m_start-1;
00316         ++(*this);
00317       }
00318       else
00319       {
00320         ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
00321         m_currentEl = m_vector.m_llStart;
00322         while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<=m_epsilon)
00323           m_currentEl = llElements[m_currentEl].next;
00324         if (m_currentEl<0)
00325         {
00326           m_cachedValue = 0; // this is to avoid a compilation warning
00327           m_cachedIndex = -1;
00328         }
00329         else
00330         {
00331           m_cachedIndex = llElements[m_currentEl].index;
00332           m_cachedValue = llElements[m_currentEl].value;
00333         }
00334       }
00335     }
00336 
00337     Index index() const { return m_cachedIndex; }
00338     Scalar value() const { return m_cachedValue; }
00339 
00340     operator bool() const { return m_cachedIndex>=0; }
00341 
00342     Iterator& operator++()
00343     {
00344       if (m_isDense)
00345       {
00346         do {
00347           ++m_cachedIndex;
00348         } while (m_cachedIndex<m_vector.m_end && internal::abs(m_vector.m_buffer[m_cachedIndex])<m_epsilon);
00349         if (m_cachedIndex<m_vector.m_end)
00350           m_cachedValue = m_vector.m_buffer[m_cachedIndex];
00351         else
00352           m_cachedIndex=-1;
00353       }
00354       else
00355       {
00356         ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
00357         do {
00358           m_currentEl = llElements[m_currentEl].next;
00359         } while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<m_epsilon);
00360         if (m_currentEl<0)
00361         {
00362           m_cachedIndex = -1;
00363         }
00364         else
00365         {
00366           m_cachedIndex = llElements[m_currentEl].index;
00367           m_cachedValue = llElements[m_currentEl].value;
00368         }
00369       }
00370       return *this;
00371     }
00372 
00373   protected:
00374     const AmbiVector& m_vector; // the target vector
00375     Index m_currentEl;            // the current element in sparse/linked-list mode
00376     RealScalar m_epsilon;       // epsilon used to prune zero coefficients
00377     Index m_cachedIndex;          // current coordinate
00378     Scalar m_cachedValue;       // current value
00379     bool m_isDense;             // mode of the vector
00380 };
00381 
} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_AMBIVECTOR_H