epoc32/include/stdapis/stlportv5/stl/_rope.h
author William Roberts <williamr@symbian.org>
Wed, 31 Mar 2010 12:33:34 +0100
branch: Symbian3
changeset 4 837f303aceeb
parent 3 e1b950c65cb4
permissions -rw-r--r--
Current Symbian^3 public API header files (from PDK 3.0.h)
This is the epoc32/include tree with the "platform" subtrees removed, and
all but a selected few mbg and rsg files removed.
     1 /*
     2  * Portions Copyright (c) 2008 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.
     3  *
     4  * Copyright (c) 1996,1997
     5  * Silicon Graphics Computer Systems, Inc.
     6  *
     7  * Copyright (c) 1997
     8  * Moscow Center for SPARC Technology
     9  *
    10  * Copyright (c) 1999
    11  * Boris Fomitchev
    12  *
    13  * This material is provided "as is", with absolutely no warranty expressed
    14  * or implied. Any use is at your own risk.
    15  *
    16  * Permission to use or copy this software for any purpose is hereby granted
    17  * without fee, provided the above notices are retained on all copies.
    18  * Permission to modify the code and to distribute modified code is granted,
    19  * provided the above notices are retained, and a notice that the code was
    20  * modified is included with the above copyright notice.
    21  *
    22  */
    23 
    24 /* NOTE: This is an internal header file, included by other STL headers.
    25  *   You should not attempt to use it directly.
    26  */
    27 
    28 // rope<_CharT,_Alloc> is a sequence of _CharT.
    29 // Ropes appear to be mutable, but update operations
    30 // really copy enough of the data structure to leave the original
    31 // valid.  Thus ropes can be logically copied by just copying
    32 // a pointer value.
    33 
    34 #ifndef _STLP_INTERNAL_ROPE_H
    35 #define _STLP_INTERNAL_ROPE_H
    36 
    37 #ifndef _STLP_INTERNAL_ALGOBASE_H
    38 #  include <stl/_algobase.h>
    39 #endif
    40 
    41 #ifndef _STLP_IOSFWD
    42 #  include <iosfwd>
    43 #endif
    44 
    45 #ifndef _STLP_INTERNAL_ALLOC_H
    46 #  include <stl/_alloc.h>
    47 #endif
    48 
    49 #ifndef _STLP_INTERNAL_ITERATOR_H
    50 #  include <stl/_iterator.h>
    51 #endif
    52 
    53 #ifndef _STLP_INTERNAL_ALGO_H
    54 #  include <stl/_algo.h>
    55 #endif
    56 
    57 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
    58 #  include <stl/_function_base.h>
    59 #endif
    60 
    61 #ifndef _STLP_INTERNAL_NUMERIC_H
    62 #  include <stl/_numeric.h>
    63 #endif
    64 
    65 #ifndef _STLP_INTERNAL_HASH_FUN_H
    66 #  include <stl/_hash_fun.h>
    67 #endif
    68 
    69 #ifndef _STLP_CHAR_TRAITS_H
    70 #  include <stl/char_traits.h>
    71 #endif
    72 
    73 #ifndef _STLP_INTERNAL_THREADS_H
    74 #  include <stl/_threads.h>
    75 #endif
    76 
    77 #ifdef _STLP_SGI_THREADS
    78 #  include <mutex.h>
    79 #endif
    80 
    81 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    82 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
    83 #elif defined(__MRC__)||defined(__SC__)
    84 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
    85 #else
    86 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
    87 #endif
    88 
    89 _STLP_BEGIN_NAMESPACE
    90 
    91 // First a lot of forward declarations.  The standard seems to require
    92 // much stricter "declaration before use" than many of the implementations
    93 // that preceded it.
    94 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
    95 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
    96 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
    97 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
    98 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
    99 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
   100 template<class _CharT, class _Alloc> class _Rope_iterator;
   101 template<class _CharT, class _Alloc> class _Rope_const_iterator;
   102 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
   103 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
   104 
   105 _STLP_MOVE_TO_PRIV_NAMESPACE
   106 
   107 // Some helpers, so we can use the power algorithm on ropes.
   108 // See below for why this isn't local to the implementation.
   109 
   110 // This uses a nonstandard refcount convention.
   111 // The result has refcount 0.
   112 template<class _CharT, class _Alloc>
   113 struct _Rope_Concat_fn
   114   : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
   115                            rope<_CharT,_Alloc> > {
   116   rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
   117                                   const rope<_CharT,_Alloc>& __y) {
   118     return __x + __y;
   119   }
   120 };
   121 
   122 template <class _CharT, class _Alloc>
   123 inline
   124 rope<_CharT,_Alloc>
   125 __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
   126 { return rope<_CharT,_Alloc>(); }
   127 
   128 _STLP_MOVE_TO_STD_NAMESPACE
   129 
   130 // Store an eos
   131 template <class _CharT>
   132 inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
   133 { *__p = 0; }
   134 
   135 template <class _CharT>
   136 inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
   137 { _STLP_STD::_Construct(__p); }
   138 
   139 template <class _CharT>
   140 inline void _S_construct_null(_CharT *__p) {
   141   typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
   142   _S_construct_null_aux(__p, _Char_Is_Integral());
   143 }
   144 
   145 // char_producers are logically functions that generate a section of
   146 // a string.  These can be converted to ropes.  The resulting rope
   147 // invokes the char_producer on demand.  This allows, for example,
   148 // files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  // Fill __buffer with __len characters of the produced string,
  // starting at logical position __start_pos.  __buffer must have
  // room for at least __len characters.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
   160 
   161 // Sequence buffers:
   162 //
   163 // Sequence must provide an append operation that appends an
   164 // array to the sequence.  Sequence buffers are useful only if
   165 // appending an entire array is cheaper than appending element by element.
   166 // This is true for many string representations.
   167 // This should  perhaps inherit from ostream<sequence::value_type>
   168 // and be implemented correspondingly, so that they can be used
   169 // for formatted.  For the sake of portability, we don't do this yet.
   170 //
   171 // For now, sequence buffers behave as output iterators.  But they also
   172 // behave a little like basic_ostringstream<sequence::value_type> and a
   173 // little like containers.
   174 
   175 template<class _Sequence
   176 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   177        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   178          , size_t _Buf_sz = 100
   179 #   if defined(__sgi) && !defined(__GNUC__)
   180 #   define __TYPEDEF_WORKAROUND
   181          ,class _V = typename _Sequence::value_type
   182 #   endif /* __sgi */
   183 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   184          >
   185 // The 3rd parameter works around a common compiler bug.
   186 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
   187 public:
   188 # ifndef __TYPEDEF_WORKAROUND
   189   typedef typename _Sequence::value_type value_type;
   190   typedef sequence_buffer<_Sequence
   191 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   192        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   193   , _Buf_sz
   194   > _Self;
   195 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   196   > _Self;
   197   enum { _Buf_sz = 100};
   198 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   199   // # endif
   200 # else /* __TYPEDEF_WORKAROUND */
   201   typedef _V value_type;
   202   typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
   203 # endif /* __TYPEDEF_WORKAROUND */
   204 protected:
   205   _Sequence* _M_prefix;
   206   value_type _M_buffer[_Buf_sz];
   207   size_t     _M_buf_count;
   208 public:
   209   void flush() {
   210     _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
   211     _M_buf_count = 0;
   212   }
   213   ~sequence_buffer() { flush(); }
   214   sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
   215   sequence_buffer(const _Self& __x) {
   216     _M_prefix = __x._M_prefix;
   217     _M_buf_count = __x._M_buf_count;
   218     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   219   }
   220   sequence_buffer(_Self& __x) {
   221     __x.flush();
   222     _M_prefix = __x._M_prefix;
   223     _M_buf_count = 0;
   224   }
   225   sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
   226   _Self& operator= (_Self& __x) {
   227     __x.flush();
   228     _M_prefix = __x._M_prefix;
   229     _M_buf_count = 0;
   230     return *this;
   231   }
   232   _Self& operator= (const _Self& __x) {
   233     _M_prefix = __x._M_prefix;
   234     _M_buf_count = __x._M_buf_count;
   235     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   236     return *this;
   237   }
   238   void push_back(value_type __x) {
   239     if (_M_buf_count < _Buf_sz) {
   240       _M_buffer[_M_buf_count] = __x;
   241       ++_M_buf_count;
   242     } else {
   243       flush();
   244       _M_buffer[0] = __x;
   245       _M_buf_count = 1;
   246     }
   247   }
   248   void append(const value_type *__s, size_t __len) {
   249     if (__len + _M_buf_count <= _Buf_sz) {
   250       size_t __i = _M_buf_count;
   251       size_t __j = 0;
   252       for (; __j < __len; __i++, __j++) {
   253         _M_buffer[__i] = __s[__j];
   254       }
   255       _M_buf_count += __len;
   256     } else if (0 == _M_buf_count) {
   257       _M_prefix->append(__s, __s + __len);
   258     } else {
   259       flush();
   260       append(__s, __len);
   261     }
   262   }
   263   _Self& write(const value_type *__s, size_t __len) {
   264     append(__s, __len);
   265     return *this;
   266   }
   267   _Self& put(value_type __x) {
   268     push_back(__x);
   269     return *this;
   270   }
   271   _Self& operator=(const value_type& __rhs) {
   272     push_back(__rhs);
   273     return *this;
   274   }
   275   _Self& operator*() { return *this; }
   276   _Self& operator++() { return *this; }
   277   _Self& operator++(int) { return *this; }
   278 };
   279 
   280 // The following should be treated as private, at least for now.
// Callback interface used when traversing a rope's characters
// (e.g. by apply-to-pieces style operations).
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  //Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  // Consume __len characters from __buffer; return false to stop
  // the traversal early.
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};
   291 
   292 //
   293 // What follows should really be local to rope.  Unfortunately,
   294 // that doesn't work, since it makes it impossible to define generic
   295 // equality on rope iterators.  According to the draft standard, the
   296 // template parameters for such an equality operator cannot be inferred
    297 // from the occurrence of a member class as a parameter.
   298 // (SGI compilers in fact allow this, but the __result wouldn't be
   299 // portable.)
   300 // Similarly, some of the static member functions are member functions
   301 // only to avoid polluting the global namespace, and to circumvent
   302 // restrictions on type inference for template functions.
   303 //
   304 
   305 //
   306 // The internal data structure for representing a rope.  This is
   307 // private to the implementation.  A rope is really just a pointer
   308 // to one of these.
   309 //
   310 // A few basic functions for manipulating this data structure
   311 // are members of _RopeRep.  Most of the more complex algorithms
   312 // are implemented as rope members.
   313 //
   314 // Some of the static member functions of _RopeRep have identically
   315 // named functions in rope that simply invoke the _RopeRep versions.
   316 //
   317 
// Base representation of a rope tree node.  Reference-counted via
// _Refcount_Base; the concrete node kind is recorded in _M_tag.
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
#  define __ROPE_MAX_DEPTH  45
#  define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  // Discriminator for the concrete node type derived from this rep.
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };

  _Tag _M_tag:8;
  bool _M_is_balanced:8;

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;

  // _M_size is an _STLP_alloc_proxy, which also carries the allocator,
  // so the allocator can be reconstructed from it.
  allocator_type get_allocator() const { return allocator_type(_M_size);  }

  // Depth of the tree rooted at this node (0 for leaves).
  unsigned char _M_depth;
  /* Flattened version of string, if needed.  */
  /* typically 0.                             */
  /* If it's not 0, then the memory is owned  */
  /* by this node.                            */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field.       */
  _CharT* _STLP_VOLATILE _M_c_string;
  // Length of the string represented by this subtree (in _M_data),
  // bundled with the allocator.
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
# endif

  // Construct a rep with tag __t, depth __d, balance flag __b and
  // length _p_size; the new node starts with refcount 1.
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
  { }

  typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
# ifdef _STLP_HAS_WCHAR_T
  typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
# else
  typedef __false_type _IsWCharT;
# endif

  // True if _CharT is char or wchar_t (i.e. needs a trailing eos slot).
  typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;

#if 0
  /* Please tell why this code is necessary if you uncomment it.
   * Problem with it is that the rope implementation expects that
   * _S_rounded_up_size(n) returns a size > n in order to store the
   * terminating null character.  When the instantiation type is not
   * char or wchar_t this is not guaranteed, resulting in a memory
   * overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }

  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  // Round an allocation request up to the granularity.  Always returns
  // a value strictly greater than __n, leaving room for a trailing eos.
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }

  // Destroy and deallocate a buffer of __len characters that was
  // allocated with _S_rounded_up_size padding.
  static void _S_free_string( _CharT* __s, size_t __len,
                             allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    //  This has to be a static member, so this gets a bit messy
#   ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len));    //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
#   else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
#   endif
  }

  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Drop one reference; frees the whole subtree when it was the last.
  // Assumes this node is not 0.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  // Null-tolerant variants of the above.
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};
   441 
// Leaf node: owns a flat character buffer.
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  // For char/wchar_t leaves the data buffer doubles as the flattened
  // C string (the padding leaves room for the trailing eos).
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Takes ownership of __d: a buffer of _p_size (> 0) characters.
  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif

  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data
  // (and any separately-allocated flattened string).
  ~_Rope_RopeLeaf() {
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};
   484 
// Interior node: the concatenation of two child subtrees.
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  _RopeRep* _M_left;   // Non-null; the destructor releases one
  _RopeRep* _M_right;  // reference to each child.
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Depth is one more than the deeper child; size is the sum of the
  // children's sizes.  New concat nodes start out unbalanced.
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif

  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};
   511 
// Node whose characters are produced on demand by a char_producer.
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  // If true, the destructor deletes _M_fn.
  bool _M_delete_when_done;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif

  // __f produces _p_size (> 0) characters; __d selects whether this
  // node takes ownership of __f.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }

  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};
   545 
   546 /*
   547  * Substring results are usually represented using just
   548  * concatenation nodes.  But in the case of very long flat ropes
   549  * or ropes with a functional representation that isn't practical.
   550  * In that case, we represent the __result as a special case of
   551  * RopeFunction, whose char_producer points back to the rope itself.
   552  * In all cases except repeated substring operations and
   553  * deallocation, we treat the __result as a RopeFunction.
   554  */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base;      // not 0; holds one reference, released in dtor
  size_t _M_start;        // offset of this substring within *_M_base
  // char_producer interface: copy __req_len characters of the
  // substring, starting at __start_pos, into __buffer.  Only function
  // and leaf bases are handled here; substrings of concatenations are
  // represented with concat nodes instead (see comment above).
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
    case _RopeRep::_S_function:
    case _RopeRep::_S_substringfn:
      {
        // Delegate to the base's producer, shifted by _M_start.
        char_producer<_CharT>* __fn =
          __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
      }
      break;
    case _RopeRep::_S_leaf:
      {
        // Copy directly out of the leaf's flat buffer.
        _CharT* __s =
          __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
        _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
      }
      break;
    default:
      _STLP_ASSERT(false)
        ;
    }
  }

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Substring of length __l (> 0) of *__b starting at __s.  Passing
  // `this` to the _Rope_RopeFunction base before construction is
  // complete is safe: the base only stores the pointer, and passes
  // false for delete_when_done so it never deletes `this` as a
  // producer.
  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
   604 
   605 /*
   606  * Self-destructing pointers to Rope_rep.
   607  * These are not conventional smart pointers.  Their
   608  * only purpose in life is to ensure that unref is called
   609  * on the pointer either at normal exit or if an exception
   610  * is raised.  It is the caller's responsibility to
   611  * adjust reference counts when these pointers are initialized
   612  * or assigned to.  (This convention significantly reduces
   613  * the number of potentially expensive reference count
   614  * updates.)
   615  */
// Exception-safety helper: unrefs its _RopeRep pointer on destruction.
// NOT a conventional smart pointer — see the comment above: the caller
// still adjusts reference counts on initialization and assignment.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef _STLP_USE_EXCEPTIONS
  // With exceptions enabled, default-initialize to 0 so a throw
  // before assignment does not unref a garbage pointer.
  _Rope_self_destruct_ptr() : _M_ptr(0) {}
#   else
  _Rope_self_destruct_ptr() {}
#   endif
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  // Plain pointer assignment: no ref-count adjustment (by design).
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
};
   634 
   635 /*
   636  * Dereferencing a nonconst iterator has to return something
   637  * that behaves almost like a reference.  It's not possible to
   638  * return an actual reference since assignment requires extra
   639  * work.  And we would get into the same problems as with the
   640  * CD2 version of basic_string.
   641  */
// Proxy returned when dereferencing a non-const rope iterator or
// indexing a rope: reading converts to _CharT, writing goes through
// operator= (defined elsewhere), which copies enough of the rope.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;          // Character position within the rope.
  _CharT _M_current;      // Cached character; meaningful only if valid.
  bool _M_current_valid;  // True when _M_current caches the character.
  _My_rope* _M_root;     // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  inline operator _CharT () const;
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  // Proxy-to-proxy assignment copies the character value, not the
  // proxy state.
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
   673 
#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// Swap two rope characters through their reference proxies.  Takes
// the proxies by value: they are themselves proxies, and the swap is
// performed on the underlying rope characters.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
#else
// There is no really acceptable way to handle this.  The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes.  We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
        _CharT __tmp = __a; \
        __a = __b; \
        __b = __tmp; \
    }

_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )

# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
# endif

#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
   704 
   705 template<class _CharT, class _Alloc>
   706 class _Rope_char_ptr_proxy {
   707   // XXX this class should be rewritten.
   708 public:
   709   typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
   710   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
   711   size_t _M_pos;
   712   rope<_CharT,_Alloc>* _M_root;     // The whole rope.
   713 
   714   _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
   715     : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
   716   _Rope_char_ptr_proxy(const _Self& __x)
   717     : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
   718   _Rope_char_ptr_proxy() {}
   719   _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
   720     _STLP_ASSERT(0 == __x)
   721   }
   722   _Self& operator= (const _Self& __x) {
   723     _M_pos = __x._M_pos;
   724     _M_root = __x._M_root;
   725     return *this;
   726   }
   727 
   728   _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
   729     return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
   730   }
   731 };
   732 
   733 
   734 /*
   735  * Rope iterators:
   736  * Unlike in the C version, we cache only part of the stack
   737  * for rope iterators, since they must be efficiently copyable.
   738  * When we run out of cache, we have to reconstruct the iterator
   739  * value.
   740  * Pointers from iterators are not included in reference counts.
   741  * Iterators are assumed to be thread private.  Ropes can
   742  * be shared.
   743  */
// Common state for rope iterators: the current position, a cached pointer
// into (part of) the current leaf's characters, and a partial cache of the
// tree path from the root to that leaf.  The buffer pointers are the
// primary validity flag: _M_buf_ptr != 0 means the buffer (and, generally,
// the path cache) is usable; otherwise everything but _M_current_pos and
// _M_root must be recomputed via _S_setcache.
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/*   : public random_access_iterator<_CharT, ptrdiff_t>  */
{
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
public:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

  enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
  enum { _S_iterator_buf_len = 15 };
  size_t _M_current_pos;
  // The whole rope.
  _RopeRep* _M_root;
  // Starting position for current leaf
  size_t _M_leaf_pos;
  // Buffer possibly containing current char.
  _CharT* _M_buf_start;
  // Pointer to current char in buffer, != 0 ==> buffer valid.
  _CharT* _M_buf_ptr;
  // One past the last valid char in buffer.
  _CharT* _M_buf_end;

  // What follows is the path cache.  We go out of our
  // way to make this compact.
  // Path_end contains the bottom section of the path from
  // the root to the current leaf.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _RopeRep const*_M_data[4];
#  else
    _RopeRep const*_M_data[_S_path_cache_len];
#  endif
  } _M_path_end;
  // Last valid __pos in path_end;
  // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
  // point to concatenation nodes.
  int _M_leaf_index;
  // (_M_path_directions >> __i) & 1 is 1
  // if we got from _M_path_end[leaf_index - __i - 1]
  // to _M_path_end[leaf_index - __i] by going to the
  // __right. Assumes path_cache_len <= 9.
  unsigned char _M_path_directions;
  // Short buffer for surrounding chars.
  // This is useful primarily for
  // RopeFunctions.  We put the buffer
  // here to avoid locking in the
  // multithreaded case.
  // The cached path is generally assumed to be valid
  // only if the buffer is valid.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _CharT _M_data[15];
#  else
    _CharT _M_data[_S_iterator_buf_len];
#  endif
  } _M_tmp_buf;

  // Set buffer contents given path cache.
  static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // Set buffer contents and path cache.
  static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // As above, but assumes path cache is valid for previous posn.
  static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // Note: only _M_current_pos, _M_root and _M_buf_ptr are initialized;
  // the remaining members are lazily filled in by _S_setcache.
  _Rope_iterator_base() {}
  _Rope_iterator_base(_RopeRep* __root, size_t __pos)
    : _M_current_pos(__pos),_M_root(__root),  _M_buf_ptr(0) {}
  void _M_incr(size_t __n);
  void _M_decr(size_t __n);
public:
  size_t index() const { return _M_current_pos; }
private:
  // Copy the source's character buffer.  When the source's buffer pointers
  // refer to its own internal _M_tmp_buf, re-point ours into our own copy
  // (preserving the offsets); otherwise the pointers refer to leaf storage
  // shared with the rope and only _M_buf_end needs to be taken.
  void _M_copy_buf(const _Self& __x) {
    _M_tmp_buf = __x._M_tmp_buf;
    if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
      _M_buf_start = _M_tmp_buf._M_data;
      _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
      _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
    } else {
      _M_buf_end = __x._M_buf_end;
    }
  }

public:
  // _M_buf_end is deliberately absent from the init list: it is only
  // meaningful when _M_buf_ptr != 0, in which case _M_copy_buf sets it.
  _Rope_iterator_base(const _Self& __x) : 
      _M_current_pos(__x._M_current_pos),
      _M_root(__x._M_root),
      _M_leaf_pos( __x._M_leaf_pos ),
      _M_buf_start(__x._M_buf_start),
      _M_buf_ptr(__x._M_buf_ptr),
      _M_path_end(__x._M_path_end),
      _M_leaf_index(__x._M_leaf_index),
      _M_path_directions(__x._M_path_directions)
      {
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
      }
  _Self& operator = (const _Self& __x)
      {
        _M_current_pos = __x._M_current_pos;
        _M_root = __x._M_root;
        _M_buf_start = __x._M_buf_start;
        _M_buf_ptr = __x._M_buf_ptr;
        _M_path_end = __x._M_path_end;
        _M_leaf_index = __x._M_leaf_index;
        _M_path_directions = __x._M_path_directions;
        _M_leaf_pos = __x._M_leaf_pos;
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
        return *this;
      }
};
   859 
   860 template<class _CharT, class _Alloc> class _Rope_iterator;
   861 
// Read-only random-access iterator over a rope.  Dereference returns the
// character BY VALUE (see the `reference` typedef below), so const
// iterators never touch the root's reference count.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  //  protected:
public:
#   ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
#   endif
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // Would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;

public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  // Fills the character cache on demand before reading through it.
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if !defined (__DMC__)
      _Rope_iterator_base<_CharT, _Alloc>::_S_setcache(*this);
#else
    { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  // Fast path: step within the cached buffer when possible; otherwise
  // fall back to the generic _M_incr.
  _Self& operator++() {
    _CharT* __next;
    if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
      this->_M_buf_ptr = __next;
      ++this->_M_current_pos;
    } else {
      this->_M_incr(1);
    }
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    // The returned iterator is built from (root, pos) only, with no cache.
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  inline reference operator[](size_t __n);
};
   952 
// Mutable rope iterator.  Dereference yields a _Rope_char_ref_proxy so
// that writes can perform the necessary copy-on-write on the tree.
// Unlike the const iterator, it holds a counted reference to the root
// (see the comment on _M_root_rope below).
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);

  // Re-validates the cached root against *_M_root_rope (defined elsewhere).
  void _M_check();
public:
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }

  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  _Rope_iterator(const  _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  // Ref the new root before unref'ing the old one so self-assignment
  // cannot destroy the tree we are about to adopt.
  _Self& operator= (const  _Self& __x) {
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  // Returns a proxy; the third argument (the current char) is supplied
  // only when the cache is valid, letting reads avoid a tree walk.
  reference operator*() {
    _M_check();
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
  1047 
# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Pre-standard (HP STL) iterator trait queries.  Older compilers without
// working iterator_traits dispatch on these overloads instead; the
// value_type/distance_type functions return null pointers whose static
// type carries the information.
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
  1064 
  1065 template <class _CharT, class _Alloc, class _CharConsumer>
  1066 bool _S_apply_to_pieces(_CharConsumer& __c,
  1067                         _Rope_RopeRep<_CharT, _Alloc> *__r,
  1068                         size_t __begin, size_t __end);
  1069                         // begin and end are assumed to be in range.
  1070 
  1071 template <class _CharT, class _Alloc>
  1072 class rope
  1073 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
  1074            : public __stlport_class<rope<_CharT, _Alloc> >
  1075 #endif
  1076 {
  1077   typedef rope<_CharT,_Alloc> _Self;
  1078 public:
  1079   typedef _CharT value_type;
  1080   typedef ptrdiff_t difference_type;
  1081   typedef size_t size_type;
  1082   typedef _CharT const_reference;
  1083   typedef const _CharT* const_pointer;
  1084   typedef _Rope_iterator<_CharT,_Alloc> iterator;
  1085   typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  1086   typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  1087   typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
  1088 
  1089   friend class _Rope_iterator<_CharT,_Alloc>;
  1090   friend class _Rope_const_iterator<_CharT,_Alloc>;
  1091   friend struct _Rope_RopeRep<_CharT,_Alloc>;
  1092   friend class _Rope_iterator_base<_CharT,_Alloc>;
  1093   friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  1094   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  1095   friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
  1096 
  1097   _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
  1098 
  1099 protected:
  1100   typedef _CharT* _Cstrptr;
  1101 
  1102   static _CharT _S_empty_c_str[1];
  1103 
  1104   enum { _S_copy_max = 23 };
  1105   // For strings shorter than _S_copy_max, we copy to
  1106   // concatenate.
  1107 
  1108   typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  1109   typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  1110 
  1111 public:
  1112   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  1113   typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type  allocator_type;
  1114 
  1115 public:
  1116   // The only data member of a rope:
  1117   _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
  1118 
  1119 public:
  1120   allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
  1121 
  1122 public:
  1123   typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  1124   typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  1125   typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  1126   typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
  1127 
  1128   // Retrieve a character at the indicated position.
  1129   static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
  1130 
  1131   // Obtain a pointer to the character at the indicated position.
  1132   // The pointer can be used to change the character.
  1133   // If such a pointer cannot be produced, as is frequently the
  1134   // case, 0 is returned instead.
  1135   // (Returns nonzero only if all nodes in the path have a refcount
  1136   // of 1.)
  1137   static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
  1138 
  // Thin forwarding wrappers for the tree's reference counting.
  // Callers routinely pass a null tree (the representation of the empty
  // rope — see empty() and ~rope()), so _RopeRep is expected to accept 0.
  static void _S_unref(_RopeRep* __t) {
    _RopeRep::_S_unref(__t);
  }
  static void _S_ref(_RopeRep* __t) {
    _RopeRep::_S_ref(__t);
  }
  1145 
  1146   typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  1147 
  1148   // _Result is counted in refcount.
  1149   static _RopeRep* _S_substring(_RopeRep* __base,
  1150                                 size_t __start, size_t __endp1);
  1151 
  1152   static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
  1153                                        const _CharT* __iter, size_t __slen);
  1154   // Concatenate rope and char ptr, copying __s.
  1155   // Should really take an arbitrary iterator.
  1156   // Result is counted in refcount.
  1157   static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
  1158                                              const _CharT* __iter, size_t __slen);
  1159     // As above, but one reference to __r is about to be
  1160     // destroyed.  Thus the pieces may be recycled if all
    // relevant reference counts are 1.
  1162 
  1163   // General concatenation on _RopeRep.  _Result
  1164   // has refcount of 1.  Adjusts argument refcounts.
  1165   static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
  1166 
  1167 public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  // Without member templates, consumers must derive from the fixed
  // _Rope_char_consumer interface instead.
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  // Feed the characters in [__begin, __end) to __c, leaf piece by leaf
  // piece, without flattening the rope.  Positions are assumed in range.
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
  1176 
  1177 protected:
  1178 
  // Allocation size (in characters) for a leaf holding __n characters;
  // delegates to _RopeRep's rounding policy.
  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }
  1181 
  1182   // Allocate and construct a RopeLeaf using the supplied allocator
  1183   // Takes ownership of s instead of copying.
  // Allocate and construct a RopeLeaf using the supplied allocator
  // Takes ownership of s instead of copying.
  // If the leaf constructor throws, the raw allocation is returned to the
  // allocator before the exception propagates (note __s itself is NOT
  // freed here — that is the caller's unwind responsibility).
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
    }
   _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                       _RopeLeaf).deallocate(__space, 1))
    return __space;
  }
  1195 
  1196   static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
  1197                                                       allocator_type __a) {
  1198    _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
  1199                                                         _RopeConcatenation).allocate(1);
  1200     return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
  1201   }
  1202 
  1203   static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
  1204                                             size_t _p_size, bool __d, allocator_type __a) {
  1205    _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
  1206                                                    _RopeFunction).allocate(1);
  1207     return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
  1208   }
  1209 
  1210   static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
  1211                                               size_t __l, allocator_type __a) {
  1212    _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
  1213                                                     _RopeSubstring).allocate(1);
  1214     return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
  1215   }
  1216 
  // Build a leaf from characters the caller still owns: copies
  // [__s, __s + _p_size) into freshly allocated storage (plus a trailing
  // null) and wraps it in a RopeLeaf.  Returns 0 for an empty input — the
  // null tree is the canonical empty rope.  If leaf construction throws,
  // the copied string is freed before the exception propagates.
  static
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;

   _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));

    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);

    _STLP_TRY {
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }
  1233 
  1234 
  1235   // Concatenation of nonempty strings.
  1236   // Always builds a concatenation node.
  1237   // Rebalances if the result is too deep.
  1238   // Result has refcount 1.
  1239   // Does not increment left and right ref counts even though
  1240   // they are referenced.
  1241   static _RopeRep*
  1242   _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
  1243 
  1244   // Concatenation helper functions
  1245   static _RopeLeaf*
  1246   _S_leaf_concat_char_iter(_RopeLeaf* __r,
  1247                            const _CharT* __iter, size_t __slen);
  1248   // Concatenate by copying leaf.
  1249   // should take an arbitrary iterator
  1250   // result has refcount 1.
  1251   static _RopeLeaf* _S_destr_leaf_concat_char_iter
  1252   (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  1253   // A version that potentially clobbers __r if __r->_M_ref_count == 1.
  1254 
  1255 
  1256   // A helper function for exponentiating strings.
  1257   // This uses a nonstandard refcount convention.
  1258   // The result has refcount 0.
  1259   typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
  1260 #if !defined (__GNUC__) || (__GNUC__ < 3)
  1261   friend _Concat_fn;
  1262 #else
  1263   friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
  1264 #endif
  1265 
  1266 public:
  1267   static size_t _S_char_ptr_len(const _CharT* __s) {
  1268     return char_traits<_CharT>::length(__s);
  1269   }
  1270 
  1271 public: /* for operators */
  // Adopt an existing tree __t; the reference the caller holds is taken
  // over (no extra _S_ref here).  Public only for the operator helpers.
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
  1274 private:
  1275   // Copy __r to the _CharT buffer.
  1276   // Returns __buffer + __r->_M_size._M_data.
  1277   // Assumes that buffer is uninitialized.
  1278   static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
  1279 
  1280   // Again, with explicit starting position and length.
  1281   // Assumes that buffer is uninitialized.
  1282   static _CharT* _S_flatten(_RopeRep* __r,
  1283                             size_t __start, size_t __len,
  1284                             _CharT* __buffer);
  1285 
  1286   // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
  1287 public:
  1288   static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
  1289 protected:
  1290   static bool _S_is_balanced(_RopeRep* __r)
  1291   { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
  1292 
  1293   static bool _S_is_almost_balanced(_RopeRep* __r) {
  1294     return (__r->_M_depth == 0 ||
  1295             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
  1296   }
  1297 
  1298   static bool _S_is_roughly_balanced(_RopeRep* __r) {
  1299     return (__r->_M_depth <= 1 ||
  1300             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
  1301   }
  1302 
  1303   // Assumes the result is not empty.
  1304   static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
  1305                                               _RopeRep* __right) {
  1306     _RopeRep* __result = _S_concat_rep(__left, __right);
  1307     if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
  1308     return __result;
  1309   }
  1310 
  1311   // The basic rebalancing operation.  Logically copies the
  1312   // rope.  The result has refcount of 1.  The client will
  1313   // usually decrement the reference count of __r.
  1314   // The result is within height 2 of balanced by the above
  1315   // definition.
  1316   static _RopeRep* _S_balance(_RopeRep* __r);
  1317 
  // Add all unbalanced subtrees to the forest of balanced trees.
  1319   // Used only by balance.
  1320   static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
  1321 
  1322   // Add __r to forest, assuming __r is already balanced.
  1323   static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
  1324 
  1325 #ifdef _STLP_DEBUG
  1326   // Print to stdout, exposing structure
  1327   static void _S_dump(_RopeRep* __r, int __indent = 0);
  1328 #endif
  1329 
  1330   // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  1331   static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
  1332 
  1333   void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
  1334 
  // Replace our tree with __r, dropping our reference to the old tree.
  // The caller must already hold a reference to __r that this rope takes
  // over.  The identity check below was deliberately disabled — presumably
  // because callers always pass a fresh reference; verify before re-enabling.
  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
      _S_unref(_M_tree_ptr._M_data);
      _M_tree_ptr._M_data = __r;
    //}
  }
  1341 
  1342 public:
  // The empty rope is represented by a null tree pointer.
  bool empty() const { return 0 == _M_tree_ptr._M_data; }

  // Comparison member function.  This is public only for those
  // clients that need a ternary comparison.  Others
  // should use the comparison operators below.
  // Returns <0, 0 or >0 per _S_compare (see its declaration above).
  int compare(const _Self& __y) const {
    return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  }
  1351 
  // From a null-terminated C string: copies the characters into one leaf.
  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}

  // From the first __len characters of __s (embedded nulls allowed).
  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}

  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}

  // From a const_iterator range: shares structure with the source rope via
  // a substring node — no character copying.
  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  // Same as above for mutable iterators.
  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
  1380 
  // Single-character rope: one leaf holding __c plus the trailing null.
  // If leaf construction throws, the character buffer is freed on unwind.
  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));

    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);

    _STLP_TRY {
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }
  1393 
  // Fill constructor: __n copies of __c.
  // Strategy: build one 32-character leaf and "exponentiate" it
  // (__power with the concatenation functor) to cover __n / 32 copies,
  // then append a leaf for the remaining __n % 32 characters.  This keeps
  // the result's structure shared instead of allocating __n characters.
  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;

    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;

    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
      __remainder = 0;
    } else {
      // Leaf for the final __rest characters; freed on unwind if the
      // leaf node constructor throws.
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      // Base leaf of exactly __exponentiate_threshold characters.
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for base_rope and __result
        //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
      } else {
        // Repeated squaring over rope concatenation.
        __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
      }
      if (0 != __remainder) {
        __result += __remainder_rope;
      }
    } else {
      __result = __remainder_rope;
    }
    // Adopt __result's tree; take an extra reference since __result (a
    // local) will release its own when it goes out of scope.
    _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
    _M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
  }
  1452 
  // Default: the empty rope (null tree).
  rope(const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {}

  // Construct a rope from a function that can compute its members
  // (lazy characters).  A zero length yields the empty rope and __fn is
  // not wrapped; __delete_fn asks the node to delete __fn on destruction.
  rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _M_tree_ptr._M_data = (0 == __len) ?
      0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  }
  1463 
  // Copy: O(1) — share the tree and bump its reference count.
  rope(const _Self& __x)
    : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
    _S_ref(_M_tree_ptr._M_data);
  }

  // Move (STLport emulation): steal the tree and leave the source empty;
  // no reference-count traffic.
  rope(__move_source<_Self> __src)
    : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
    __src.get()._M_tree_ptr._M_data = 0;
  }

  ~rope() {
    _S_unref(_M_tree_ptr._M_data);
  }

  // Ref the source tree BEFORE _M_reset unrefs ours, so self-assignment
  // cannot destroy the tree being adopted.
  _Self& operator=(const _Self& __x) {
    _STLP_ASSERT(get_allocator() == __x.get_allocator())
    _S_ref(__x._M_tree_ptr._M_data);
    _M_reset(__x._M_tree_ptr._M_data);
    return *this;
  }
  1484 
  1485   void clear() {
  1486     _S_unref(_M_tree_ptr._M_data);
  1487     _M_tree_ptr._M_data = 0;
  1488   }
  // Append one character; the destructive concat may reuse our tree's
  // nodes when we hold the only reference.
  void push_back(_CharT __x) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
  }
  1492 
  // Remove the last character by taking the substring [0, size - 1).
  // Precondition: non-empty — a null tree is dereferenced otherwise.
  void pop_back() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data =
      _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
    _S_unref(__old);
  }

  // Last character, by value.  Precondition: non-empty.
  _CharT back() const {
    return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  }
  1503 
  // Prepend one character: build a one-char leaf and concatenate it on the
  // left.  On success both the old tree's and the leaf's extra references
  // are dropped (the concat result holds its own); if the concat throws,
  // only the leaf is released and the rope is left unchanged.
  void push_front(_CharT __x) {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _RopeRep* __left =
      _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
      _S_unref(__old);
      _S_unref(__left);
    }
    _STLP_UNWIND(_S_unref(__left))
  }
  1515 
  // Remove the first character by taking the substring [1, size).
  // Precondition: non-empty — a null tree is dereferenced otherwise.
  void pop_front() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
    _S_unref(__old);
  }

  // First character, by value.  Precondition: non-empty.
  _CharT front() const {
    return _S_fetch(_M_tree_ptr._M_data, 0);
  }
  1525 
  1526   void balance() {
  1527     _RopeRep* __old = _M_tree_ptr._M_data;
  1528     _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
  1529     _S_unref(__old);
  1530   }
  1531 
  // Flatten the whole rope into __buffer (which must hold size()
  // characters).  Existing elements in the buffer are destroyed first
  // because _S_flatten constructs into uninitialized storage.
  void copy(_CharT* __buffer) const {
    _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
    _S_flatten(_M_tree_ptr._M_data, __buffer);
  }
  1536 
  1537   /*
  1538    * This is the copy function from the standard, but
  1539    * with the arguments reordered to make it consistent with the
  1540    * rest of the interface.
  1541    * Note that this guaranteed not to compile if the draft standard
  1542    * order is assumed.
  1543    */
  1544   size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
  1545     size_t _p_size = size();
  1546     size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
  1547 
  1548     _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
  1549     _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
  1550     return __len;
  1551   }
  1552 
# ifdef _STLP_DEBUG
  // Print to stdout, exposing structure.  May be useful for
  // performance debugging.  Debug builds only.
  void dump() {
    _S_dump(_M_tree_ptr._M_data);
  }
# endif
  1560 
  1561   // Convert to 0 terminated string in new allocated memory.
  1562   // Embedded 0s in the input do not terminate the copy.
  1563   const _CharT* c_str() const;
  1564 
  // As above, but also use the flattened representation as
  // the new rope representation.
  1567   const _CharT* replace_with_c_str();
  1568 
  // Reclaim memory for the c_str generated flattened string.
  // Intentionally undocumented, since it's hard to say when this
  // is safe for multiple threads.
  void delete_c_str () {
    if (0 == _M_tree_ptr._M_data) return;
    // When the rope is a single leaf whose character array doubles as the
    // cached C string, the two share storage; freeing it would destroy the
    // rope's own data, so leave it alone.
    if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
        ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
        _M_tree_ptr._M_data->_M_c_string) {
      // Representation shared
      return;
    }
    _M_tree_ptr._M_data->_M_free_c_string();
    _M_tree_ptr._M_data->_M_c_string = 0;
  }
  1583 
  // Unchecked element access.  Returns by value: const access to a rope
  // never exposes a reference into the shared tree.
  _CharT operator[] (size_type __pos) const {
    return _S_fetch(_M_tree_ptr._M_data, __pos);
  }

  // Checked element access, as for basic_string::at.
  _CharT at(size_type __pos) const {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }
  1592 
  // Const iteration over [0, size()).  Iterators hold the root pointer and
  // a position index.
  const_iterator begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  // An easy way to get a const iterator from a non-const container.
  const_iterator const_begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  const_iterator end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }

  const_iterator const_end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }
  1609 
  1610   size_type size() const {
  1611     return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
  1612   }
  1613 
  1614   size_type length() const {
  1615     return size();
  1616   }
  1617 
  1618   size_type max_size() const {
  1619     return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
  1620     //  Guarantees that the result can be sufficiently
  1621     //  balanced.  Longer ropes will probably still work,
  1622     //  but it's harder to make guarantees.
  1623   }
  1624 
  // Const reverse iteration, built on the forward const iterators.
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator const_rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator const_rend() const {
    return const_reverse_iterator(begin());
  }
  // The symmetric cases are intentionally omitted, since they're presumed
  // to be less common, and we don't handle them as well.
  1642 
  // The following should really be templatized.
  // The first argument should be an input iterator or
  // forward iterator with value_type _CharT.
  // Append __n characters starting at __iter.
  _Self& append(const _CharT* __iter, size_t __n) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
    return *this;
  }

  // Append a null-terminated string (embedded nulls terminate it).
  _Self& append(const _CharT* __c_string) {
    size_t __len = _S_char_ptr_len(__c_string);
    append(__c_string, __len);
    return *this;
  }

  // Append the character range [__s, __e).
  _Self& append(const _CharT* __s, const _CharT* __e) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
    return *this;
  }

  // Append the rope range [__s, __e); both iterators must refer to the
  // same rope.  The substring is built first, then concatenated.
  _Self& append(const_iterator __s, const_iterator __e) {
    _STLP_ASSERT(__s._M_root == __e._M_root)
    _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
    _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
    return *this;
  }

  // Append a single character.
  _Self& append(_CharT __c) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
    return *this;
  }

  _Self& append() { return append(_CharT()); }  // XXX why?

  // Append an entire rope; trees are shared, not copied.
  _Self& append(const _Self& __y) {
    _STLP_ASSERT(__y.get_allocator() == get_allocator())
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
    return *this;
  }

  // Append __n copies of __c, via a temporary rope.
  _Self& append(size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __last(__n, __c);
    return append(__last);
  }

  // O(1): just swaps the tree pointers.
  void swap(_Self& __b) {
    _M_tree_ptr.swap(__b._M_tree_ptr);
  }
  1691 
protected:
  // Result is included in refcount.
  // Core splice primitive: return a tree equal to __old with the character
  // range [__pos1, __pos2) replaced by __r (or simply removed when __r is
  // null).  __old itself is not modified; temporaries are cleaned up by
  // _Self_destruct_ptr.
  static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                           size_t __pos2, _RopeRep* __r) {
    if (0 == __old) { _S_ref(__r); return __r; }
    _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
    _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
    _STLP_MPWFIX_TRY  //*TY 06/01/2000 -
    _RopeRep* __result;

    if (0 == __r) {
      __result = _S_concat_rep(__left, __right);
    } else {
      _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
      _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
      __result = _S_concat_rep(__left_result, __right);
    }
    return __result;
    _STLP_MPWFIX_CATCH  //*TY 06/01/2000 -
  }
  1712 
public:
  // Insert rope __r at position __p; throws out_of_range if __p > size().
  void insert(size_t __p, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _STLP_ASSERT(get_allocator() == __r.get_allocator())
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
  }

  // Insert __n copies of __c at __p (range check done by the rope overload).
  void insert(size_t __p, size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __r(__n,__c);
    insert(__p, __r);
  }
  1724 
  // Insert __n characters from __i at position __p: split the tree at __p,
  // concatenate the new characters after the left part, then rejoin.
  void insert(size_t __p, const _CharT* __i, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
    _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
    _Self_destruct_ptr __left_result(
                                     _S_concat_char_iter(__left, __i, __n));
    // _S_destr_concat_char_iter should be safe here.
    // But as it stands it's probably not a win, since __left
    // is likely to have additional references.
    _M_reset(_S_concat_rep(__left_result, __right));
  }
  1736 
  // Remaining insert overloads; all delegate to the two primitives above.
  void insert(size_t __p, const _CharT* __c_string) {
    insert(__p, __c_string, _S_char_ptr_len(__c_string));
  }

  void insert(size_t __p, _CharT __c) {
    insert(__p, &__c, 1);
  }

  // Inserts a single default-constructed character.
  void insert(size_t __p) {
    _CharT __c = _CharT();
    insert(__p, &__c, 1);
  }

  void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  void insert(size_t __p, const const_iterator& __i,
                          const const_iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  void insert(size_t __p, const iterator& __i,
                          const iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }
  1766 
  // (position, length) versions of replace operations:
  // Replace the range [__p, __p + __n) with rope __r; throws out_of_range
  // if __p > size().  All other overloads delegate to this one via a
  // temporary rope.
  void replace(size_t __p, size_t __n, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
  }

  void replace(size_t __p, size_t __n,
               const _CharT* __i, size_t __i_len) {
    _Self __r(__i, __i_len);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n, _CharT __c) {
    _Self __r(__c);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n, const _CharT* __c_string) {
    _Self __r(__c_string);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const const_iterator& __i, const const_iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const iterator& __i, const iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }
  1806 
  // Single character variants:
  // Overwrite the one character at __p.
  // NOTE(review): the range check accepts __p == size(), after which the
  // write through the iterator targets one past the last character; this
  // looks like it should be __p >= size(), but matches the original SGI
  // sources -- confirm before changing.
  void replace(size_t __p, _CharT __c) {
    if (__p > size()) _M_throw_out_of_range();
    iterator __i(this, __p);
    *__i = __c;
  }

  // The rest replace exactly one character (length 1) at __p.
  void replace(size_t __p, const _Self& __r) {
    replace(__p, 1, __r);
  }

  void replace(size_t __p, const _CharT* __i, size_t __i_len) {
    replace(__p, 1, __i, __i_len);
  }

  void replace(size_t __p, const _CharT* __c_string) {
    replace(__p, 1, __c_string);
  }

  void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const const_iterator& __i,
                           const const_iterator& __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const iterator& __i,
                           const iterator& __j) {
    replace(__p, 1, __i, __j);
  }
  1839 
  // Erase, (position, size) variant.
  // Removes __n characters starting at __p by splicing in a null tree.
  void erase(size_t __p, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
  }
  1845 
  1846   // Erase, single character
  1847   void erase(size_t __p) {
  1848     erase(__p, __p + 1);
  1849   }
  1850 
  // Insert, iterator variants.
  // All delegate to the position-based overloads and return __p unchanged
  // (the insertion position, not an iterator past the inserted text).
  iterator insert(const iterator& __p, const _Self& __r)
  { insert(__p.index(), __r); return __p; }
  iterator insert(const iterator& __p, size_t __n, _CharT __c)
  { insert(__p.index(), __n, __c); return __p; }
  iterator insert(const iterator& __p, _CharT __c)
  { insert(__p.index(), __c); return __p; }
  iterator insert(const iterator& __p )
  { insert(__p.index()); return __p; }
  iterator insert(const iterator& __p, const _CharT* c_string)
  { insert(__p.index(), c_string); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  { insert(__p.index(), __i, __n); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i,
                  const _CharT* __j)
  { insert(__p.index(), __i, __j);  return __p; }
  iterator insert(const iterator& __p,
                  const const_iterator& __i, const const_iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const iterator& __i, const iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  1873 
  // Replace, range variants.
  // Replace [__p, __q) with the given replacement; all delegate to the
  // (position, length) overloads using __q.index() - __p.index() as length.
  void replace(const iterator& __p, const iterator& __q,
               const _Self& __r)
  { replace(__p.index(), __q.index() - __p.index(), __r); }
  void replace(const iterator& __p, const iterator& __q, _CharT __c)
  { replace(__p.index(), __q.index() - __p.index(), __c); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __c_string)
  { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, size_t __n)
  { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const const_iterator& __i, const const_iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const iterator& __i, const iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  1895 
  // Replace, iterator variants.
  // Single-position forms: each replaces one character at __p.index(),
  // delegating to the position-based overloads.
  void replace(const iterator& __p, const _Self& __r)
  { replace(__p.index(), __r); }
  void replace(const iterator& __p, _CharT __c)
  { replace(__p.index(), __c); }
  void replace(const iterator& __p, const _CharT* __c_string)
  { replace(__p.index(), __c_string); }
  void replace(const iterator& __p, const _CharT* __i, size_t __n)
  { replace(__p.index(), __i, __n); }
  void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, const_iterator __i,
               const_iterator __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, iterator __i, iterator __j)
  { replace(__p.index(), __i, __j); }
  1912 
  // Iterator and range variants of erase
  // Erase [__p, __q); returns a fresh iterator at the erase position
  // (the originals are invalidated by the tree update).
  iterator erase(const iterator& __p, const iterator& __q) {
    size_t __p_index = __p.index();
    erase(__p_index, __q.index() - __p_index);
    return iterator(this, __p_index);
  }
  // Erase the single character at __p.
  iterator erase(const iterator& __p) {
    size_t __p_index = __p.index();
    erase(__p_index, 1);
    return iterator(this, __p_index);
  }
  1924 
  // Substring of length __len starting at __start.  NOTE: unlike
  // basic_string, the default length is 1, not npos.  Only __start is
  // range-checked here.
  _Self substr(size_t __start, size_t __len = 1) const {
    if (__start > size()) _M_throw_out_of_range();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  }

  // Substring [__start, __end) given by mutable iterators (unchecked).
  _Self substr(iterator __start, iterator __end) const {
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a mutable iterator.
  _Self substr(iterator __start) const {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }

  _Self substr(const_iterator __start, const_iterator __end) const {
    // This might eventually take advantage of the cache in the
    // iterator.
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a const iterator.
  rope<_CharT,_Alloc> substr(const_iterator __start) {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }
  1949 
  1950 #include <stl/_string_npos.h>
  1951 
  // Find the first occurrence of rope __s at or after __pos via linear
  // search.  Returns npos on failure (or size(), under the old SGI
  // _STLP_OLD_ROPE_SEMANTICS convention).
  size_type find(const _Self& __s, size_type __pos = 0) const {
    if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
      return npos;
# else
      return size();
# endif

    size_type __result_pos;
    const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
  // Single-character find; defined out of line.
  size_type find(_CharT __c, size_type __pos = 0) const;
  // C-string find.  NOTE(review): unlike the rope overload above, __pos is
  // not range-checked here before the iterator arithmetic.
  size_type find(const _CharT* __s, size_type __pos = 0) const {
    size_type __result_pos;
    const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
                                     __s, __s + _S_char_ptr_len(__s));
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
  1979 
  // Mutable iteration.  Mutable iterators reference the rope object itself
  // (not just the tree), since writes must update the rope.
  iterator mutable_begin() {
    return(iterator(this, 0));
  }

  iterator mutable_end() {
    return(iterator(this, size()));
  }

  reverse_iterator mutable_rbegin() {
    return reverse_iterator(mutable_end());
  }

  reverse_iterator mutable_rend() {
    return reverse_iterator(mutable_begin());
  }

  // Writable proxy reference to the character at __pos.
  reference mutable_reference_at(size_type __pos) {
    return reference(this, __pos);
  }
  1999 
# ifdef __STD_STUFF
  // Optional container-like extensions: non-const element access through
  // proxy references, plus no-op capacity functions.
  reference operator[] (size_type __pos) {
    return reference(this, __pos);
  }

  reference at(size_type __pos) {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }

  // Capacity is meaningless for ropes; these exist only for interface
  // compatibility and do nothing.
  void resize(size_type, _CharT) {}
  void resize(size_type) {}
  void reserve(size_type = 0) {}
  size_type capacity() const {
    return max_size();
  }

  // Stuff below this line is dangerous because it's error prone.
  // I would really like to get rid of it.
  // copy function with funny arg ordering.
  size_type copy(_CharT* __buffer, size_type __n,
                 size_type __pos = 0) const {
    return copy(__pos, __n, __buffer);
  }

  iterator end() { return mutable_end(); }

  iterator begin() { return mutable_begin(); }

  reverse_iterator rend() { return mutable_rend(); }

  reverse_iterator rbegin() { return mutable_rbegin(); }

# else

  // Default: non-const begin/end still yield const iterators, so plain
  // iteration never pays the mutable-iterator overhead.
  const_iterator end() { return const_end(); }

  const_iterator begin() { return const_begin(); }

  const_reverse_iterator rend() { return const_rend(); }

  const_reverse_iterator rbegin() { return const_rbegin(); }

# endif
  2044 }; //class rope
  2045 
#if !defined (_STLP_STATIC_CONST_INIT_BUG)
#  if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
// Out-of-class definition of rope::npos, needed only for gcc 2.96.
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#  endif
#endif
  2052 
// Random access through a const iterator: fetch the character __n places
// past the iterator's current position.
template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
  2057 
  2058 template <class _CharT, class _Alloc>
  2059 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2060                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2061   return (__x._M_current_pos == __y._M_current_pos &&
  2062           __x._M_root == __y._M_root);
  2063 }
  2064 
  2065 template <class _CharT, class _Alloc>
  2066 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2067                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
  2068 { return (__x._M_current_pos < __y._M_current_pos); }
  2069 
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining const-iterator comparisons, all derived from == and <.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2093 
// Iterator difference: positional distance, signed.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
// Iterator minus offset: same root, shifted position.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
# endif

// Iterator plus offset, in both argument orders.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
  2115 
// Mutable iterators compare via the owning rope (_M_root_rope), not the
// raw tree, since the tree may be replaced by mutation.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }
  2127 
#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
// Remaining mutable-iterator comparisons, derived from == and <.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2149 
// Mutable-iterator difference and offset arithmetic; positions shift, the
// owning rope pointer is carried along.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
# endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
  2176 
// Rope concatenation: O(1)-ish structural concat, no character copying.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}
  2185 
  2186 template <class _CharT, class _Alloc>
  2187 inline rope<_CharT,_Alloc>&
  2188 operator+= (rope<_CharT,_Alloc>& __left,
  2189             const rope<_CharT,_Alloc>& __right) {
  2190   __left.append(__right);
  2191   return __left;
  2192 }
  2193 
// Rope + C string: concatenates a character run without first building a
// temporary rope.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}
  2201 
  2202 template <class _CharT, class _Alloc>
  2203 inline rope<_CharT,_Alloc>&
  2204 operator+= (rope<_CharT,_Alloc>& __left,
  2205             const _CharT* __right) {
  2206   __left.append(__right);
  2207   return __left;
  2208 }
  2209 
// Rope + single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}
  2215 
  2216 template <class _CharT, class _Alloc>
  2217 inline rope<_CharT,_Alloc>&
  2218 operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  2219   __left.append(__right);
  2220   return __left;
  2221 }
  2222 
// Lexicographic rope comparisons in terms of rope::compare.
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}
  2236 
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining rope comparisons, derived from == and <.
template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

// Inequality for the char-pointer proxy returned by c_str machinery.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2270 
// Char-pointer proxies are equal when they refer to the same position of
// the same rope.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}
  2276 
  2277 #if !defined (_STLP_USE_NO_IOSTREAMS)
  2278 template<class _CharT, class _Traits, class _Alloc>
  2279 basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
  2280                                             const rope<_CharT, _Alloc>& __r);
  2281 #endif
  2282 
// Convenience typedefs for the common character types.
typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
#endif

// Free-function access to a writable proxy reference at index __i.
inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }

#if defined (_STLP_HAS_WCHAR_T)
inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }
#endif
  2295 
#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
// ADL swap for ropes: O(1), delegates to the member swap.
template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
{ __x.swap(__y); }
#else

// No partial ordering available: provide non-template overloads for the
// two common instantiations only.
inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
# ifdef _STLP_HAS_WCHAR_T  // dwa 8/21/97
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
# endif

#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
  2308 
  2309 
  2310 // Hash functions should probably be revisited later:
  2311 _STLP_TEMPLATE_NULL struct hash<crope> {
  2312   size_t operator()(const crope& __str) const {
  2313     size_t _p_size = __str.size();
  2314 
  2315     if (0 == _p_size) return 0;
  2316     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2317   }
  2318 };
  2319 
  2320 #if defined (_STLP_HAS_WCHAR_T)  // dwa 8/21/97
  2321 _STLP_TEMPLATE_NULL struct hash<wrope> {
  2322   size_t operator()(const wrope& __str) const {
  2323     size_t _p_size = __str.size();
  2324 
  2325     if (0 == _p_size) return 0;
  2326     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2327   }
  2328 };
  2329 #endif
  2330 
#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// I couldn't get this to work with VC++
// Rope-specific rotate; implemented out of line (see stl/_rope.c).
template<class _CharT,class _Alloc>
#  if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
extern
#  endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);

// Overload of std::rotate for crope mutable iterators; dispatches to the
// rope-aware implementation above.
inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
{ _Rope_rotate(__first, __middle, __last); }
#endif
  2346 
  2347 template <class _CharT, class _Alloc>
  2348 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
  2349   if (_M_current_valid) {
  2350     return _M_current;
  2351   } else {
  2352     return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
  2353   }
  2354 }
  2355 
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
// Tell STLport's move framework that ropes are cheaply movable.
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __stlp_movable implemented;
  // Completeness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif
  2364 
  2365 _STLP_END_NAMESPACE
  2366 
  2367 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
  2368 #  include <stl/_rope.c>
  2369 #endif
  2370 
  2371 #endif /* _STLP_INTERNAL_ROPE_H */
  2372 
  2373 // Local Variables:
  2374 // mode:C++
  2375 // End: