epoc32/include/tools/stlport/stl/_rope.h
author William Roberts <williamr@symbian.org>
Wed, 31 Mar 2010 12:33:34 +0100
branch Symbian3
changeset 4 837f303aceeb
permissions -rw-r--r--
Current Symbian^3 public API header files (from PDK 3.0.h)
This is the epoc32/include tree with the "platform" subtrees removed, and
all but a selected few mbg and rsg files removed.
     1 /*
     2  *
     3  * Copyright (c) 1996,1997
     4  * Silicon Graphics Computer Systems, Inc.
     5  *
     6  * Copyright (c) 1997
     7  * Moscow Center for SPARC Technology
     8  *
     9  * Copyright (c) 1999
    10  * Boris Fomitchev
    11  *
    12  * This material is provided "as is", with absolutely no warranty expressed
    13  * or implied. Any use is at your own risk.
    14  *
    15  * Permission to use or copy this software for any purpose is hereby granted
    16  * without fee, provided the above notices are retained on all copies.
    17  * Permission to modify the code and to distribute modified code is granted,
    18  * provided the above notices are retained, and a notice that the code was
    19  * modified is included with the above copyright notice.
    20  *
    21  */
    22 
    23 /* NOTE: This is an internal header file, included by other STL headers.
    24  *   You should not attempt to use it directly.
    25  */
    26 
    27 // rope<_CharT,_Alloc> is a sequence of _CharT.
    28 // Ropes appear to be mutable, but update operations
    29 // really copy enough of the data structure to leave the original
    30 // valid.  Thus ropes can be logically copied by just copying
    31 // a pointer value.
    32 
    33 #ifndef _STLP_INTERNAL_ROPE_H
    34 #define _STLP_INTERNAL_ROPE_H
    35 
    36 #ifndef _STLP_INTERNAL_ALGOBASE_H
    37 #  include <stl/_algobase.h>
    38 #endif
    39 
    40 #ifndef _STLP_IOSFWD
    41 #  include <iosfwd>
    42 #endif
    43 
    44 #ifndef _STLP_INTERNAL_ALLOC_H
    45 #  include <stl/_alloc.h>
    46 #endif
    47 
    48 #ifndef _STLP_INTERNAL_ITERATOR_H
    49 #  include <stl/_iterator.h>
    50 #endif
    51 
    52 #ifndef _STLP_INTERNAL_ALGO_H
    53 #  include <stl/_algo.h>
    54 #endif
    55 
    56 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
    57 #  include <stl/_function_base.h>
    58 #endif
    59 
    60 #ifndef _STLP_INTERNAL_NUMERIC_H
    61 #  include <stl/_numeric.h>
    62 #endif
    63 
    64 #ifndef _STLP_INTERNAL_HASH_FUN_H
    65 #  include <stl/_hash_fun.h>
    66 #endif
    67 
    68 #ifndef _STLP_CHAR_TRAITS_H
    69 #  include <stl/char_traits.h>
    70 #endif
    71 
    72 #ifndef _STLP_INTERNAL_THREADS_H
    73 #  include <stl/_threads.h>
    74 #endif
    75 
    76 #ifdef _STLP_SGI_THREADS
    77 #  include <mutex.h>
    78 #endif
    79 
    80 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    81 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
    82 #elif defined(__MRC__)||defined(__SC__)
    83 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
    84 #else
    85 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
    86 #endif
    87 
    88 _STLP_BEGIN_NAMESPACE
    89 
    90 // First a lot of forward declarations.  The standard seems to require
    91 // much stricter "declaration before use" than many of the implementations
    92 // that preceded it.
    93 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
    94 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
    95 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
    96 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
    97 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
    98 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
    99 template<class _CharT, class _Alloc> class _Rope_iterator;
   100 template<class _CharT, class _Alloc> class _Rope_const_iterator;
   101 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
   102 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
   103 
   104 _STLP_MOVE_TO_PRIV_NAMESPACE
   105 
   106 // Some helpers, so we can use the power algorithm on ropes.
   107 // See below for why this isn't local to the implementation.
   108 
   109 // This uses a nonstandard refcount convention.
   110 // The result has refcount 0.
   111 template<class _CharT, class _Alloc>
   112 struct _Rope_Concat_fn
   113   : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
   114                            rope<_CharT,_Alloc> > {
   115   rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
   116                                   const rope<_CharT,_Alloc>& __y) {
   117     return __x + __y;
   118   }
   119 };
   120 
   121 template <class _CharT, class _Alloc>
   122 inline
   123 rope<_CharT,_Alloc>
   124 __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
   125 { return rope<_CharT,_Alloc>(); }
   126 
   127 _STLP_MOVE_TO_STD_NAMESPACE
   128 
   129 // Store an eos
// Eos writer for integral character types: the eos value is zero.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
{ *__p = 0; }
   133 
// Eos writer for non-integral (class) character types: default-construct
// the eos object in place.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
{ _STLP_STD::_Construct(__p); }
   137 
   138 template <class _CharT>
   139 inline void _S_construct_null(_CharT *__p) {
   140   typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
   141   _S_construct_null_aux(__p, _Char_Is_Integral());
   142 }
   143 
   144 // char_producers are logically functions that generate a section of
   145 // a string.  These can be converted to ropes.  The resulting rope
   146 // invokes the char_producer on demand.  This allows, for example,
   147 // files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  // Fill __buffer with __len characters of the underlying sequence,
  // starting at logical position __start_pos.  Concrete producers
  // (e.g. _Rope_RopeSubstring below) supply the characters.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
   159 
   160 // Sequence buffers:
   161 //
   162 // Sequence must provide an append operation that appends an
   163 // array to the sequence.  Sequence buffers are useful only if
   164 // appending an entire array is cheaper than appending element by element.
   165 // This is true for many string representations.
   166 // This should  perhaps inherit from ostream<sequence::value_type>
   167 // and be implemented correspondingly, so that they can be used
    168 // for formatted output.  For the sake of portability, we don't do this yet.
   169 //
   170 // For now, sequence buffers behave as output iterators.  But they also
   171 // behave a little like basic_ostringstream<sequence::value_type> and a
   172 // little like containers.
   173 
   174 template<class _Sequence
   175 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   176        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   177          , size_t _Buf_sz = 100
   178 #   if defined(__sgi) && !defined(__GNUC__)
   179 #   define __TYPEDEF_WORKAROUND
   180          ,class _V = typename _Sequence::value_type
   181 #   endif /* __sgi */
   182 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   183          >
   184 // The 3rd parameter works around a common compiler bug.
   185 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
   186 public:
   187 # ifndef __TYPEDEF_WORKAROUND
   188   typedef typename _Sequence::value_type value_type;
   189   typedef sequence_buffer<_Sequence
   190 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   191        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   192   , _Buf_sz
   193   > _Self;
   194 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   195   > _Self;
   196   enum { _Buf_sz = 100};
   197 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   198   // # endif
   199 # else /* __TYPEDEF_WORKAROUND */
   200   typedef _V value_type;
   201   typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
   202 # endif /* __TYPEDEF_WORKAROUND */
   203 protected:
   204   _Sequence* _M_prefix;
   205   value_type _M_buffer[_Buf_sz];
   206   size_t     _M_buf_count;
   207 public:
   208   void flush() {
   209     _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
   210     _M_buf_count = 0;
   211   }
   212   ~sequence_buffer() { flush(); }
   213   sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
   214   sequence_buffer(const _Self& __x) {
   215     _M_prefix = __x._M_prefix;
   216     _M_buf_count = __x._M_buf_count;
   217     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   218   }
   219   sequence_buffer(_Self& __x) {
   220     __x.flush();
   221     _M_prefix = __x._M_prefix;
   222     _M_buf_count = 0;
   223   }
   224   sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
   225   _Self& operator= (_Self& __x) {
   226     __x.flush();
   227     _M_prefix = __x._M_prefix;
   228     _M_buf_count = 0;
   229     return *this;
   230   }
   231   _Self& operator= (const _Self& __x) {
   232     _M_prefix = __x._M_prefix;
   233     _M_buf_count = __x._M_buf_count;
   234     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   235     return *this;
   236   }
   237   void push_back(value_type __x) {
   238     if (_M_buf_count < _Buf_sz) {
   239       _M_buffer[_M_buf_count] = __x;
   240       ++_M_buf_count;
   241     } else {
   242       flush();
   243       _M_buffer[0] = __x;
   244       _M_buf_count = 1;
   245     }
   246   }
   247   void append(const value_type *__s, size_t __len) {
   248     if (__len + _M_buf_count <= _Buf_sz) {
   249       size_t __i = _M_buf_count;
   250       size_t __j = 0;
   251       for (; __j < __len; __i++, __j++) {
   252         _M_buffer[__i] = __s[__j];
   253       }
   254       _M_buf_count += __len;
   255     } else if (0 == _M_buf_count) {
   256       _M_prefix->append(__s, __s + __len);
   257     } else {
   258       flush();
   259       append(__s, __len);
   260     }
   261   }
   262   _Self& write(const value_type *__s, size_t __len) {
   263     append(__s, __len);
   264     return *this;
   265   }
   266   _Self& put(value_type __x) {
   267     push_back(__x);
   268     return *this;
   269   }
   270   _Self& operator=(const value_type& __rhs) {
   271     push_back(__rhs);
   272     return *this;
   273   }
   274   _Self& operator*() { return *this; }
   275   _Self& operator++() { return *this; }
   276   _Self& operator++(int) { return *this; }
   277 };
   278 
   279 // The following should be treated as private, at least for now.
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  // Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  // Invoked with successive chunks of rope data.
  // NOTE(review): presumably the bool return tells the traversal
  // whether to continue -- confirm against the rope traversal code.
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};
   290 
   291 //
   292 // What follows should really be local to rope.  Unfortunately,
   293 // that doesn't work, since it makes it impossible to define generic
   294 // equality on rope iterators.  According to the draft standard, the
   295 // template parameters for such an equality operator cannot be inferred
    296 // from the occurrence of a member class as a parameter.
   297 // (SGI compilers in fact allow this, but the __result wouldn't be
   298 // portable.)
   299 // Similarly, some of the static member functions are member functions
   300 // only to avoid polluting the global namespace, and to circumvent
   301 // restrictions on type inference for template functions.
   302 //
   303 
   304 //
   305 // The internal data structure for representing a rope.  This is
   306 // private to the implementation.  A rope is really just a pointer
   307 // to one of these.
   308 //
   309 // A few basic functions for manipulating this data structure
   310 // are members of _RopeRep.  Most of the more complex algorithms
   311 // are implemented as rope members.
   312 //
   313 // Some of the static member functions of _RopeRep have identically
   314 // named functions in rope that simply invoke the _RopeRep versions.
   315 //
   316 
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
#  define __ROPE_MAX_DEPTH  45
#  define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  // Node kind: flat leaf, concatenation of two subtrees, lazy
  // substring, or a section generated on demand by a char_producer.
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };

  _Tag _M_tag:8;
  bool _M_is_balanced:8;

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;

  // _M_size is an _STLP_alloc_proxy that carries both the node length
  // and the allocator; converting it back yields the allocator.
  allocator_type get_allocator() const { return allocator_type(_M_size);  }

  unsigned char _M_depth;                  // Subtree height; 0 for leaves.
  _CharT* _STLP_VOLATILE _M_c_string;      // See the comment below.
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
# endif

  /* _M_c_string:                              */
  /* Flattened version of string, if needed.   */
  /* typically 0.                              */
  /* If it's not 0, then the memory is owned   */
  /* by this node.                             */
  /* In the case of a leaf, this may point to  */
  /* the same memory as the data field.        */
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
  { }

  // Compile-time classification of _CharT, used to decide whether a
  // trailing eos character must be maintained.
  typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
# ifdef _STLP_HAS_WCHAR_T
  typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
# else
  typedef __false_type _IsWCharT;
# endif

  typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;

#if 0
  /* Please tell why this code is necessary if you uncomment it.
   * Problem with it is that the rope implementation expects that
   * _S_rounded_up_size(n) returns a size > n in order to store the
   * terminating null character. When the instantiation type is not char
   * or wchar_t this is not guaranteed, resulting in a memory overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }

  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  // Round __n up to the allocation granularity.  Note "+ granularity"
  // rather than "+ granularity - 1": the result is always strictly
  // greater than __n, leaving room for a trailing eos character.
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }

  // Destroy and deallocate a character array of length __len that was
  // allocated (padded) through allocator __a.
  static void _S_free_string( _CharT* __s, size_t __len,
                             allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    //  This has to be a static member, so this gets a bit messy
#   ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len));    //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
#   else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
#   endif
  }

  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Drop one reference; free the subtree when the count hits zero.
  // Assumes the node pointer is not 0.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  // Null-tolerant variants of the two functions above.
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};
   440 
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  // For basic character types the data array doubles as the flattened
  // c-string, so publish it through _M_c_string.
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Takes ownership of __d, an array of _p_size (> 0) characters.
  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif

  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
  ~_Rope_RopeLeaf() {
    // Free the flattened copy only when it is a distinct allocation
    // from the data array itself.
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};
   483 
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  // Child subtrees; the destructor releases one reference on each.
  _RopeRep* _M_left;
  _RopeRep* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Depth is one more than the deeper child; size is the sum of the
  // children's sizes.  New concatenations start out not balanced.
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif

  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};
   510 
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  bool _M_delete_when_done;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif

  // __f produces this node's _p_size (> 0) characters on demand; when
  // __d is true the destructor deletes __f.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }

  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};
   544 
   545 /*
   546  * Substring results are usually represented using just
   547  * concatenation nodes.  But in the case of very long flat ropes
   548  * or ropes with a functional representation that isn't practical.
   549  * In that case, we represent the __result as a special case of
   550  * RopeFunction, whose char_producer points back to the rope itself.
   551  * In all cases except repeated substring operations and
   552  * deallocation, we treat the __result as a RopeFunction.
   553  */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base;      // not 0
  size_t _M_start;        // offset of this substring within *_M_base
  // char_producer interface: copy __req_len characters of the
  // substring, starting at __start_pos, into __buffer.  Only function,
  // substring and leaf bases are handled here; anything else asserts
  // (per the comment above, substrings of other node kinds are
  // represented with concatenation nodes instead).
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
    case _RopeRep::_S_function:
    case _RopeRep::_S_substringfn:
      {
        // Delegate to the base's producer, shifted by our offset.
        char_producer<_CharT>* __fn =
          __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
      }
      break;
    case _RopeRep::_S_leaf:
      {
        // Copy directly out of the leaf's flat character array.
        _CharT* __s =
          __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
        _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
      }
      break;
    default:
      _STLP_ASSERT(false)
        ;
    }
  }

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Represent the substring [__s, __s + __l) of *__b.  Registers this
  // object as its own char_producer, takes a reference on the base,
  // then retags itself from _S_function to _S_substringfn.
  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
   603 
   604 /*
   605  * Self-destructing pointers to Rope_rep.
   606  * These are not conventional smart pointers.  Their
   607  * only purpose in life is to ensure that unref is called
   608  * on the pointer either at normal exit or if an exception
   609  * is raised.  It is the caller's responsibility to
   610  * adjust reference counts when these pointers are initialized
   611  * or assigned to.  (This convention significantly reduces
   612  * the number of potentially expensive reference count
   613  * updates.)
   614  */
   615 template<class _CharT, class _Alloc>
   616 struct _Rope_self_destruct_ptr {
   617   _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
   618   ~_Rope_self_destruct_ptr()
   619   { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
   620 #   ifdef _STLP_USE_EXCEPTIONS
   621   _Rope_self_destruct_ptr() : _M_ptr(0) {}
   622 #   else
   623   _Rope_self_destruct_ptr() {}
   624 #   endif
   625   _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
   626   _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
   627   _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
   628   operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
   629   _Rope_self_destruct_ptr<_CharT, _Alloc>&
   630   operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
   631   { _M_ptr = __x; return *this; }
   632 };
   633 
   634 /*
   635  * Dereferencing a nonconst iterator has to return something
   636  * that behaves almost like a reference.  It's not possible to
   637  * return an actual reference since assignment requires extra
   638  * work.  And we would get into the same problems as with the
   639  * CD2 version of basic_string.
   640  */
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;         // Character position within *_M_root.
  _CharT _M_current;     // Cached character; valid only if _M_current_valid.
  bool _M_current_valid;
  _My_rope* _M_root;     // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  // Read access; defined out of line elsewhere.
  inline operator _CharT () const;
  // Write access; defined out of line elsewhere.
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  // Proxy-to-proxy assignment copies the character value, not the proxy.
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
   672 
#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// swap for proxy references: goes through the _CharT conversion and
// proxy assignment, so both underlying ropes are actually updated.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
#else
// There is no really acceptable way to handle this.  The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes.  We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
        _CharT __tmp = __a; \
        __a = __b; \
        __b = __tmp; \
    }

_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )

# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
# endif

#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
   703 
// Pointer-like proxy, produced by _Rope_char_ref_proxy::operator&().
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
  // XXX this class should be rewritten.
public:
  typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  size_t _M_pos;
  rope<_CharT,_Alloc>* _M_root;     // The whole rope.

  _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy(const _Self& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy() {}
  // Only a null _CharT* may be converted to a proxy pointer.
  _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    _STLP_ASSERT(0 == __x)
  }
  _Self& operator= (const _Self& __x) {
    _M_pos = __x._M_pos;
    _M_root = __x._M_root;
    return *this;
  }

  // Dereference yields a character reference proxy for the same position.
  _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
  }
};
   731 
   732 
   733 /*
   734  * Rope iterators:
   735  * Unlike in the C version, we cache only part of the stack
   736  * for rope iterators, since they must be efficiently copyable.
   737  * When we run out of cache, we have to reconstruct the iterator
   738  * value.
   739  * Pointers from iterators are not included in reference counts.
   740  * Iterators are assumed to be thread private.  Ropes can
   741  * be shared.
   742  */
   743 template<class _CharT, class _Alloc>
   744 class _Rope_iterator_base
   745 /*   : public random_access_iterator<_CharT, ptrdiff_t>  */
   746 {
   747   friend class rope<_CharT,_Alloc>;
   748   typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
   749   typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
   750 public:
   751   typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
   752 
   753   enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
   754   enum { _S_iterator_buf_len = 15 };
   755   size_t _M_current_pos;
   756   // The whole rope.
   757   _RopeRep* _M_root;
   758   // Starting position for current leaf
   759   size_t _M_leaf_pos;
   760   // Buffer possibly containing current char.
   761   _CharT* _M_buf_start;
   762   // Pointer to current char in buffer, != 0 ==> buffer valid.
   763   _CharT* _M_buf_ptr;
   764   // One past __last valid char in buffer.
   765   _CharT* _M_buf_end;
   766 
   767   // What follows is the path cache.  We go out of our
   768   // way to make this compact.
   769   // Path_end contains the bottom section of the path from
   770   // the root to the current leaf.
   771   struct {
   772 #  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
   773     _RopeRep const*_M_data[4];
   774 #  else
   775     _RopeRep const*_M_data[_S_path_cache_len];
   776 #  endif
   777   } _M_path_end;
   778   // Last valid __pos in path_end;
   779   // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
   780   // point to concatenation nodes.
   781   int _M_leaf_index;
   782   // (_M_path_directions >> __i) & 1 is 1
   783   // if we got from _M_path_end[leaf_index - __i - 1]
   784   // to _M_path_end[leaf_index - __i] by going to the
   785   // __right. Assumes path_cache_len <= 9.
   786   unsigned char _M_path_directions;
   787   // Short buffer for surrounding chars.
   788   // This is useful primarily for
   789   // RopeFunctions.  We put the buffer
   790   // here to avoid locking in the
   791   // multithreaded case.
   792   // The cached path is generally assumed to be valid
   793   // only if the buffer is valid.
   794   struct {
   795 #  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
   796     _CharT _M_data[15];
   797 #  else
   798     _CharT _M_data[_S_iterator_buf_len];
   799 #  endif
   800   } _M_tmp_buf;
   801 
   802   // Set buffer contents given path cache.
   803   static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
   804   // Set buffer contents and path cache.
   805   static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
   806   // As above, but assumes path cache is valid for previous posn.
   807   static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
   808   _Rope_iterator_base() {}
   809   _Rope_iterator_base(_RopeRep* __root, size_t __pos)
   810     : _M_current_pos(__pos),_M_root(__root),  _M_buf_ptr(0) {}
   811   void _M_incr(size_t __n);
   812   void _M_decr(size_t __n);
   813 public:
   814   size_t index() const { return _M_current_pos; }
   815   void _M_copy_buf(const _Self& __x) {
   816     _M_tmp_buf = __x._M_tmp_buf;
   817     if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
   818       _M_buf_start = _M_tmp_buf._M_data;
   819       _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
   820       _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
   821     }
   822   }
   823 
   824   _Rope_iterator_base(const _Self& __x)
   825     : _M_current_pos(__x._M_current_pos), _M_root(__x._M_root),
   826       _M_buf_start(__x._M_buf_start), _M_buf_ptr(__x._M_buf_ptr), _M_path_end(__x._M_path_end),
   827       _M_leaf_index(__x._M_leaf_index), _M_path_directions(__x._M_path_directions) {
   828     if (0 != __x._M_buf_ptr) {
   829       _M_copy_buf(__x);
   830     }
   831   }
   832   _Self& operator = (const _Self& __x) {
   833     _M_current_pos = __x._M_current_pos;
   834     _M_root = __x._M_root;
   835     _M_buf_start = __x._M_buf_start;
   836     _M_buf_ptr = __x._M_buf_ptr;
   837     _M_path_end = __x._M_path_end;
   838     _M_leaf_index = __x._M_leaf_index;
   839     _M_path_directions = __x._M_path_directions;
   840     if (0 != __x._M_buf_ptr) {
   841       _M_copy_buf(__x);
   842     }
   843     return *this;
   844   }
   845 };
   846 
   847 template<class _CharT, class _Alloc> class _Rope_iterator;
   848 
// Const iterator over a rope.  Dereference returns the character BY
// VALUE (see the reference typedef); const iterators never touch the
// root's reference count.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  //  protected:
public:
#   ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
#   endif
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // Would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;

public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  // Conversion from the mutable iterator.
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  // Refill the cached buffer on demand, then read the current char.
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if !defined (__DMC__)
      _S_setcache(*this);
#else
    // Digital Mars workaround: call through a base-class lvalue.
    { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  _Self& operator++() {
    // Fast path: advance inside the cached buffer when possible.
    _CharT* __next;
    if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
      this->_M_buf_ptr = __next;
      ++this->_M_current_pos;
    } else {
      this->_M_incr(1);
    }
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  // Postfix forms return a freshly-built iterator with no buffer cache.
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  inline reference operator[](size_t __n);
};
   939 
// Mutable iterator over a rope.  Keeps a counted reference to the
// rope's root so that modifications of the underlying rope can be
// detected (see the comment on _M_root_rope below).
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);

  // Re-validate the cached root against *_M_root_rope (defined elsewhere).
  void _M_check();
public:
  // Dereference yields a proxy so writes through the iterator can do
  // the required copy-on-update bookkeeping.
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }

  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  _Rope_iterator(const  _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  _Self& operator= (const  _Self& __x) {
    // Ref the incoming root before unref-ing ours: safe for self-assignment.
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  reference operator*() {
    _M_check();
    // With a valid buffer the proxy can be seeded with the current value,
    // avoiding a fetch on read access.
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
  1034 
# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Pre-standard (original HP STL) iterator trait query functions.
// Standard code uses iterator_traits; these overloads exist only for
// configurations that still use the old query-function protocol.
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
  1051 
  1052 template <class _CharT, class _Alloc, class _CharConsumer>
  1053 bool _S_apply_to_pieces(_CharConsumer& __c,
  1054                         _Rope_RopeRep<_CharT, _Alloc> *__r,
  1055                         size_t __begin, size_t __end);
  1056                         // begin and end are assumed to be in range.
  1057 
  1058 template <class _CharT, class _Alloc>
  1059 class rope
  1060 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
  1061            : public __stlport_class<rope<_CharT, _Alloc> >
  1062 #endif
  1063 {
  1064   typedef rope<_CharT,_Alloc> _Self;
  1065 public:
  1066   typedef _CharT value_type;
  1067   typedef ptrdiff_t difference_type;
  1068   typedef size_t size_type;
  1069   typedef _CharT const_reference;
  1070   typedef const _CharT* const_pointer;
  1071   typedef _Rope_iterator<_CharT,_Alloc> iterator;
  1072   typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  1073   typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  1074   typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
  1075 
  1076   friend class _Rope_iterator<_CharT,_Alloc>;
  1077   friend class _Rope_const_iterator<_CharT,_Alloc>;
  1078   friend struct _Rope_RopeRep<_CharT,_Alloc>;
  1079   friend class _Rope_iterator_base<_CharT,_Alloc>;
  1080   friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  1081   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  1082   friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
  1083 
  1084   _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
  1085 
  1086 protected:
  1087   typedef _CharT* _Cstrptr;
  1088 
  1089   static _CharT _S_empty_c_str[1];
  1090 
  1091   enum { _S_copy_max = 23 };
  1092   // For strings shorter than _S_copy_max, we copy to
  1093   // concatenate.
  1094 
  1095   typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  1096   typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  1097 
  1098 public:
  1099   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  1100   typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type  allocator_type;
  1101 
  1102 public:
  1103   // The only data member of a rope:
  1104   _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
  1105 
  1106 public:
  1107   allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
  1108 
  1109 public:
  1110   typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  1111   typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  1112   typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  1113   typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
  1114 
  1115   // Retrieve a character at the indicated position.
  1116   static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
  1117 
  1118   // Obtain a pointer to the character at the indicated position.
  1119   // The pointer can be used to change the character.
  1120   // If such a pointer cannot be produced, as is frequently the
  1121   // case, 0 is returned instead.
  1122   // (Returns nonzero only if all nodes in the path have a refcount
  1123   // of 1.)
  1124   static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
  1125 
  // Convenience forwarder to the representation's refcount decrement.
  static void _S_unref(_RopeRep* __t) {
    _RopeRep::_S_unref(__t);
  }
  // Convenience forwarder to the representation's refcount increment.
  static void _S_ref(_RopeRep* __t) {
    _RopeRep::_S_ref(__t);
  }
  1132 
  1133   typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  1134 
  1135   // _Result is counted in refcount.
  1136   static _RopeRep* _S_substring(_RopeRep* __base,
  1137                                 size_t __start, size_t __endp1);
  1138 
  1139   static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
  1140                                        const _CharT* __iter, size_t __slen);
  1141   // Concatenate rope and char ptr, copying __s.
  1142   // Should really take an arbitrary iterator.
  1143   // Result is counted in refcount.
  1144   static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
  1145                                              const _CharT* __iter, size_t __slen);
  1146     // As above, but one reference to __r is about to be
  1147     // destroyed.  Thus the pieces may be recycled if all
  1148     // relevent reference counts are 1.
  1149 
  1150   // General concatenation on _RopeRep.  _Result
  1151   // has refcount of 1.  Adjusts argument refcounts.
  1152   static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
  1153 
  1154 public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  // Feed the characters in [__begin, __end) to __c without flattening
  // the rope (delegates to the free _S_apply_to_pieces above).
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
  1163 
  1164 protected:
  1165 
  // Number of _CharT slots actually allocated for an __n-char leaf buffer
  // (delegates to the representation's rounding policy).
  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }
  1168 
  // Allocate and construct a RopeLeaf using the supplied allocator
  // Takes ownership of s instead of copying.
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
    }
    // On constructor failure, return the node's raw memory before
    // rethrowing.
   _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                       _RopeLeaf).deallocate(__space, 1))
    return __space;
  }
  1182 
  // Allocate and construct a concatenation node over __left/__right.
  // NOTE(review): unlike _S_new_RopeLeaf there is no unwind here, so the
  // node memory would leak if the constructor threw -- presumably it
  // cannot; verify.
  static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
                                                      allocator_type __a) {
   _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                        _RopeConcatenation).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
  }
  1189 
  // Allocate and construct a function node (lazy characters computed by
  // __f); __d requests deletion of __f when the node dies.
  static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                                            size_t _p_size, bool __d, allocator_type __a) {
   _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                   _RopeFunction).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
  }
  1196 
  // Allocate and construct a substring node referencing [__s, __s+__l)
  // of base rope __b.
  static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                                              size_t __l, allocator_type __a) {
   _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                    _RopeSubstring).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
  }
  1203 
  // Build a leaf from a caller-owned buffer: COPIES __s (does not take
  // ownership, unlike _S_new_RopeLeaf).  Returns 0 for an empty string.
  static
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;

   _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));

    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);

    // If leaf-node creation throws, release the character buffer we
    // just filled before propagating.
    _STLP_TRY {
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }
  1220 
  1221 
  1222   // Concatenation of nonempty strings.
  1223   // Always builds a concatenation node.
  1224   // Rebalances if the result is too deep.
  1225   // Result has refcount 1.
  1226   // Does not increment left and right ref counts even though
  1227   // they are referenced.
  1228   static _RopeRep*
  1229   _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
  1230 
  1231   // Concatenation helper functions
  1232   static _RopeLeaf*
  1233   _S_leaf_concat_char_iter(_RopeLeaf* __r,
  1234                            const _CharT* __iter, size_t __slen);
  1235   // Concatenate by copying leaf.
  1236   // should take an arbitrary iterator
  1237   // result has refcount 1.
  1238   static _RopeLeaf* _S_destr_leaf_concat_char_iter
  1239   (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  1240   // A version that potentially clobbers __r if __r->_M_ref_count == 1.
  1241 
  1242 
  1243   // A helper function for exponentiating strings.
  1244   // This uses a nonstandard refcount convention.
  1245   // The result has refcount 0.
  1246   typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
  1247 #if !defined (__GNUC__) || (__GNUC__ < 3)
  1248   friend _Concat_fn;
  1249 #else
  1250   friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
  1251 #endif
  1252 
  1253 public:
  1254   static size_t _S_char_ptr_len(const _CharT* __s) {
  1255     return char_traits<_CharT>::length(__s);
  1256   }
  1257 
  1258 public: /* for operators */
  // Adopt an existing tree: __t is stored as-is, with no _S_ref here
  // (the operator implementations rely on this).
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
  1261 private:
  1262   // Copy __r to the _CharT buffer.
  1263   // Returns __buffer + __r->_M_size._M_data.
  1264   // Assumes that buffer is uninitialized.
  1265   static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
  1266 
  1267   // Again, with explicit starting position and length.
  1268   // Assumes that buffer is uninitialized.
  1269   static _CharT* _S_flatten(_RopeRep* __r,
  1270                             size_t __start, size_t __len,
  1271                             _CharT* __buffer);
  1272 
  1273   // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
  1274 public:
  1275   static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
  1276 protected:
  // A tree is balanced when its size reaches the Fibonacci-style minimum
  // recorded in _S_min_len for its depth.
  static bool _S_is_balanced(_RopeRep* __r)
  { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
  1279 
  // Within one level of balanced (compares against the threshold for
  // depth - 1; depth 0 is trivially fine).
  static bool _S_is_almost_balanced(_RopeRep* __r) {
    return (__r->_M_depth == 0 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
  }
  1284 
  // Within two levels of balanced (compares against the threshold for
  // depth - 2; depths 0 and 1 are trivially fine).
  static bool _S_is_roughly_balanced(_RopeRep* __r) {
    return (__r->_M_depth <= 1 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
  }
  1289 
  // Assumes the result is not empty.
  // Concatenate and record the balance status on the new node so later
  // rebalancing can skip it.
  static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
                                              _RopeRep* __right) {
    _RopeRep* __result = _S_concat_rep(__left, __right);
    if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
    return __result;
  }
  1297 
  1298   // The basic rebalancing operation.  Logically copies the
  1299   // rope.  The result has refcount of 1.  The client will
  1300   // usually decrement the reference count of __r.
  1301   // The result is within height 2 of balanced by the above
  1302   // definition.
  1303   static _RopeRep* _S_balance(_RopeRep* __r);
  1304 
  1305   // Add all unbalanced subtrees to the forest of balanceed trees.
  1306   // Used only by balance.
  1307   static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
  1308 
  1309   // Add __r to forest, assuming __r is already balanced.
  1310   static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
  1311 
  1312 #ifdef _STLP_DEBUG
  1313   // Print to stdout, exposing structure
  1314   static void _S_dump(_RopeRep* __r, int __indent = 0);
  1315 #endif
  1316 
  1317   // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  1318   static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
  1319 
  1320   void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
  1321 
  // Replace the tree with __r: drops this rope's reference to the old
  // tree; __r's count is NOT incremented (ownership is transferred in).
  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
      _S_unref(_M_tree_ptr._M_data);
      _M_tree_ptr._M_data = __r;
    //}
  }
  1328 
  1329 public:
  1330   bool empty() const { return 0 == _M_tree_ptr._M_data; }
  1331 
  // Comparison member function.  This is public only for those
  // clients that need a ternary comparison.  Others
  // should use the comparison operators below.
  // Returns -1, 0, or 1 (see _S_compare).
  int compare(const _Self& __y) const {
    return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  }
  1338 
  // Construct from a 0-terminated string; the characters are copied.
  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}
  1342 
  // Construct from an explicit-length buffer (embedded 0s allowed); the
  // characters are copied.
  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}
  1347 
  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  // Construct from a [__s, __e) character range; the characters are copied.
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}
  1355 
  // Construct from a const_iterator range: shares structure via a
  // substring node instead of copying characters.
  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
  1361 
  // Same as above, for mutable iterators.
  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
  1367 
  // Single-character rope: one leaf holding __c.
  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));

    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);

    // If leaf creation throws, free the character buffer before
    // propagating.
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }
  1380 
  // Rope of __n copies of __c.  Builds one shared leaf of
  // __exponentiate_threshold characters and concatenates it to itself
  // via __power, then appends the leftover __n % threshold characters.
  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;

    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;

    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
      __remainder = 0;
    } else {
      // Build the leftover leaf of __rest characters.
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      // Build the threshold-sized base leaf, then self-concatenate it
      // __exponent times.
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for base_rope and __result
        //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
      } else {
        __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
      }
      if (0 != __remainder) {
        __result += __remainder_rope;
      }
    } else {
      __result = __remainder_rope;
    }
    // Share __result's tree and add our own reference; __result's
    // destructor drops its reference on return.
    _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
    _M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
  }
  1439 
  // Default constructor: the empty rope (null tree).
  rope(const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {}
  1442 
  // Construct a rope from a function that can compute its members
  // (characters are produced lazily by __fn; __delete_fn hands __fn's
  // ownership to the rope).
  rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    // A zero-length function rope is just the empty rope.
    _M_tree_ptr._M_data = (0 == __len) ?
      0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  }
  1450 
  // Logical copy: share the tree and bump its reference count.
  rope(const _Self& __x)
    : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
    _S_ref(_M_tree_ptr._M_data);
  }
  1455 
  // Move construction: steal the source's tree and leave it empty.
  rope(__move_source<_Self> __src)
    : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
    __src.get()._M_tree_ptr._M_data = 0;
  }
  1460 
  // Drop this rope's reference to the (possibly shared) tree.
  ~rope() {
    _S_unref(_M_tree_ptr._M_data);
  }
  1464 
  // Copy assignment: ref the source tree before unref-ing ours (via
  // _M_reset), which makes self-assignment safe.
  _Self& operator=(const _Self& __x) {
    _STLP_ASSERT(get_allocator() == __x.get_allocator())
    _S_ref(__x._M_tree_ptr._M_data);
    _M_reset(__x._M_tree_ptr._M_data);
    return *this;
  }
  1471 
  1472   void clear() {
  1473     _S_unref(_M_tree_ptr._M_data);
  1474     _M_tree_ptr._M_data = 0;
  1475   }
  // Append one character; the destructive concat may recycle the tree
  // when it is unshared (see _S_destr_concat_char_iter).
  void push_back(_CharT __x) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
  }
  1479 
  1480   void pop_back() {
  1481     _RopeRep* __old = _M_tree_ptr._M_data;
  1482     _M_tree_ptr._M_data =
  1483       _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
  1484     _S_unref(__old);
  1485   }
  1486 
  // Last character, by value.  Precondition (unchecked): non-empty rope.
  _CharT back() const {
    return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  }
  1490 
  // Prepend one character: build a one-char leaf and concatenate it in
  // front of the current tree.
  void push_front(_CharT __x) {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _RopeRep* __left =
      _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
    _STLP_TRY {
      // _S_concat_rep takes its own references, so both the old tree and
      // the temporary leaf can be released afterwards.
      _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
      _S_unref(__old);
      _S_unref(__left);
    }
    // On failure, release only the leaf; the rope still owns __old.
    _STLP_UNWIND(_S_unref(__left))
  }
  1502 
  1503   void pop_front() {
  1504     _RopeRep* __old = _M_tree_ptr._M_data;
  1505     _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
  1506     _S_unref(__old);
  1507   }
  1508 
  // First character, by value.  Precondition (unchecked): non-empty rope.
  _CharT front() const {
    return _S_fetch(_M_tree_ptr._M_data, 0);
  }
  1512 
  1513   void balance() {
  1514     _RopeRep* __old = _M_tree_ptr._M_data;
  1515     _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
  1516     _S_unref(__old);
  1517   }
  1518 
  // Flatten the whole rope into __buffer (caller guarantees room for
  // size() characters).  Existing buffer contents are destroyed first.
  void copy(_CharT* __buffer) const {
    _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
    _S_flatten(_M_tree_ptr._M_data, __buffer);
  }
  1523 
  1524   /*
  1525    * This is the copy function from the standard, but
  1526    * with the arguments reordered to make it consistent with the
  1527    * rest of the interface.
  1528    * Note that this guaranteed not to compile if the draft standard
  1529    * order is assumed.
  1530    */
  1531   size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
  1532     size_t _p_size = size();
  1533     size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
  1534 
  1535     _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
  1536     _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
  1537     return __len;
  1538   }
  1539 
# ifdef _STLP_DEBUG
  // Print to stdout, exposing structure.  May be useful for
  // performance debugging.
  void dump() {
    _S_dump(_M_tree_ptr._M_data);
  }
# endif

  // Convert to 0 terminated string in new allocated memory.
  // Embedded 0s in the input do not terminate the copy.
  const _CharT* c_str() const;

  // As above, but also use the flattened representation as the
  // the new rope representation.
  const _CharT* replace_with_c_str();

  // Reclaim memory for the c_str generated flattened string.
  // Intentionally undocumented, since it's hard to say when this
  // is safe for multiple threads.
  void delete_c_str () {
    if (0 == _M_tree_ptr._M_data) return;
    // When the root is a leaf whose character data doubles as the cached
    // C string, the storage belongs to the rope itself and must not be freed.
    if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
        ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
        _M_tree_ptr._M_data->_M_c_string) {
      // Representation shared
      return;
    }
    _M_tree_ptr._M_data->_M_free_c_string();
    _M_tree_ptr._M_data->_M_c_string = 0;
  }
  1570 
  // Unchecked read-only element access.
  _CharT operator[] (size_type __pos) const {
    return _S_fetch(_M_tree_ptr._M_data, __pos);
  }

  // Checked element access; throws out_of_range for bad positions.
  _CharT at(size_type __pos) const {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }

  // Read-only iteration over [0, size()).
  const_iterator begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  // An easy way to get a const iterator from a non-const container.
  const_iterator const_begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  const_iterator end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }

  const_iterator const_end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }
  1596 
  1597   size_type size() const {
  1598     return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
  1599   }
  1600 
  1601   size_type length() const {
  1602     return size();
  1603   }
  1604 
  1605   size_type max_size() const {
  1606     return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
  1607     //  Guarantees that the result can be sufficiently
  1608     //  balanced.  Longer ropes will probably still work,
  1609     //  but it's harder to make guarantees.
  1610   }
  1611 
  // Read-only reverse iteration, built on the const forward iterators.
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator const_rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator const_rend() const {
    return const_reverse_iterator(begin());
  }
  // The symmetric cases are intentionally omitted, since they're presumed
  // to be less common, and we don't handle them as well.
  1629 
  // The following should really be templatized.
  // The first argument should be an input iterator or
  // forward iterator with value_type _CharT.

  // Append __n characters starting at __iter.
  _Self& append(const _CharT* __iter, size_t __n) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
    return *this;
  }

  // Append a 0-terminated character string.
  _Self& append(const _CharT* __c_string) {
    size_t __len = _S_char_ptr_len(__c_string);
    append(__c_string, __len);
    return *this;
  }

  // Append the character range [__s, __e).
  _Self& append(const _CharT* __s, const _CharT* __e) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
    return *this;
  }

  // Append the rope range [__s, __e).  Both iterators must refer to the
  // same tree, which must use the same allocator as *this.
  _Self& append(const_iterator __s, const_iterator __e) {
    _STLP_ASSERT(__s._M_root == __e._M_root)
    _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
    _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
    return *this;
  }

  // Append a single character.
  _Self& append(_CharT __c) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
    return *this;
  }

  _Self& append() { return append(_CharT()); }  // XXX why?

  // Append another rope; allocators must match.
  _Self& append(const _Self& __y) {
    _STLP_ASSERT(__y.get_allocator() == get_allocator())
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
    return *this;
  }

  // Append __n copies of __c, via a temporary rope.
  _Self& append(size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __last(__n, __c);
    return append(__last);
  }

  // Constant-time swap of the underlying tree pointers.
  void swap(_Self& __b) {
    _M_tree_ptr.swap(__b._M_tree_ptr);
  }
  1678 
protected:
  // Result is included in refcount.
  // Build a tree equal to __old with [__pos1, __pos2) replaced by the
  // contents of __r (or simply removed when __r is null).  __old is not
  // modified; the result shares its nodes via substring/concat.
  static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                           size_t __pos2, _RopeRep* __r) {
    if (0 == __old) { _S_ref(__r); return __r; }
    // Pieces outside the replaced range; released automatically on exit.
    _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
    _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
    _STLP_MPWFIX_TRY  //*TY 06/01/2000 -
    _RopeRep* __result;

    if (0 == __r) {
      __result = _S_concat_rep(__left, __right);
    } else {
      _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
      _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
      __result = _S_concat_rep(__left_result, __right);
    }
    return __result;
    _STLP_MPWFIX_CATCH  //*TY 06/01/2000 -
  }
  1699 
public:
  // Insert rope __r at position __p.  Allocators must match.
  void insert(size_t __p, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _STLP_ASSERT(get_allocator() == __r.get_allocator())
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
  }

  // Insert __n copies of __c at position __p.
  void insert(size_t __p, size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __r(__n,__c);
    insert(__p, __r);
  }

  // Insert __n characters from __i at position __p.
  void insert(size_t __p, const _CharT* __i, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
    _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
    _Self_destruct_ptr __left_result(
                                     _S_concat_char_iter(__left, __i, __n));
    // _S_ destr_concat_char_iter should be safe here.
    // But as it stands it's probably not a win, since __left
    // is likely to have additional references.
    _M_reset(_S_concat_rep(__left_result, __right));
  }

  // Insert a 0-terminated string at position __p.
  void insert(size_t __p, const _CharT* __c_string) {
    insert(__p, __c_string, _S_char_ptr_len(__c_string));
  }

  // Insert a single character at position __p.
  void insert(size_t __p, _CharT __c) {
    insert(__p, &__c, 1);
  }

  // Insert one default-constructed character at position __p.
  void insert(size_t __p) {
    _CharT __c = _CharT();
    insert(__p, &__c, 1);
  }

  // Insert the character range [__i, __j) at position __p.
  void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  // Insert the rope range [__i, __j) at position __p.
  void insert(size_t __p, const const_iterator& __i,
                          const const_iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  void insert(size_t __p, const iterator& __i,
                          const iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }
  1753 
  // (position, length) versions of replace operations:

  // Replace [__p, __p + __n) with the contents of rope __r.
  void replace(size_t __p, size_t __n, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
  }

  // Replace [__p, __p + __n) with __i_len characters from __i.
  void replace(size_t __p, size_t __n,
               const _CharT* __i, size_t __i_len) {
    _Self __r(__i, __i_len);
    replace(__p, __n, __r);
  }

  // Replace [__p, __p + __n) with the single character __c.
  void replace(size_t __p, size_t __n, _CharT __c) {
    _Self __r(__c);
    replace(__p, __n, __r);
  }

  // Replace [__p, __p + __n) with a 0-terminated string.
  void replace(size_t __p, size_t __n, const _CharT* __c_string) {
    _Self __r(__c_string);
    replace(__p, __n, __r);
  }

  // Replace [__p, __p + __n) with the character range [__i, __j).
  void replace(size_t __p, size_t __n,
               const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  // Replace [__p, __p + __n) with the rope range [__i, __j).
  void replace(size_t __p, size_t __n,
               const const_iterator& __i, const const_iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const iterator& __i, const iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  // Single character variants:

  // Overwrite the character at __p through a mutable iterator.
  void replace(size_t __p, _CharT __c) {
    if (__p > size()) _M_throw_out_of_range();
    iterator __i(this, __p);
    *__i = __c;
  }

  // The remaining overloads all replace exactly one character at __p.
  void replace(size_t __p, const _Self& __r) {
    replace(__p, 1, __r);
  }

  void replace(size_t __p, const _CharT* __i, size_t __i_len) {
    replace(__p, 1, __i, __i_len);
  }

  void replace(size_t __p, const _CharT* __c_string) {
    replace(__p, 1, __c_string);
  }

  void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const const_iterator& __i,
                           const const_iterator& __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const iterator& __i,
                           const iterator& __j) {
    replace(__p, 1, __i, __j);
  }
  1826 
  // Erase, (position, size) variant.
  // Removes __n characters starting at __p by splicing out that range.
  void erase(size_t __p, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
  }
  1832 
  1833   // Erase, single character
  1834   void erase(size_t __p) {
  1835     erase(__p, __p + 1);
  1836   }
  1837 
  // Insert, iterator variants.
  // All forward to the (index, ...) overloads above and return the
  // insertion-point iterator unchanged.
  iterator insert(const iterator& __p, const _Self& __r)
  { insert(__p.index(), __r); return __p; }
  iterator insert(const iterator& __p, size_t __n, _CharT __c)
  { insert(__p.index(), __n, __c); return __p; }
  iterator insert(const iterator& __p, _CharT __c)
  { insert(__p.index(), __c); return __p; }
  iterator insert(const iterator& __p )
  { insert(__p.index()); return __p; }
  iterator insert(const iterator& __p, const _CharT* c_string)
  { insert(__p.index(), c_string); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  { insert(__p.index(), __i, __n); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i,
                  const _CharT* __j)
  { insert(__p.index(), __i, __j);  return __p; }
  iterator insert(const iterator& __p,
                  const const_iterator& __i, const const_iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const iterator& __i, const iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  1860 
  // Replace, range variants.
  // Replace the characters in [__p, __q); all forward to the
  // (position, length) overloads above.
  void replace(const iterator& __p, const iterator& __q,
               const _Self& __r)
  { replace(__p.index(), __q.index() - __p.index(), __r); }
  void replace(const iterator& __p, const iterator& __q, _CharT __c)
  { replace(__p.index(), __q.index() - __p.index(), __c); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __c_string)
  { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, size_t __n)
  { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const const_iterator& __i, const const_iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const iterator& __i, const iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }

  // Replace, iterator variants.
  // Replace the single character at __p.
  void replace(const iterator& __p, const _Self& __r)
  { replace(__p.index(), __r); }
  void replace(const iterator& __p, _CharT __c)
  { replace(__p.index(), __c); }
  void replace(const iterator& __p, const _CharT* __c_string)
  { replace(__p.index(), __c_string); }
  void replace(const iterator& __p, const _CharT* __i, size_t __n)
  { replace(__p.index(), __i, __n); }
  void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, const_iterator __i,
               const_iterator __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, iterator __i, iterator __j)
  { replace(__p.index(), __i, __j); }
  1899 
  // Iterator and range variants of erase
  // Erase [__p, __q); returns an iterator at the erase position.
  iterator erase(const iterator& __p, const iterator& __q) {
    size_t __p_index = __p.index();
    erase(__p_index, __q.index() - __p_index);
    return iterator(this, __p_index);
  }
  // Erase the single character at __p.
  iterator erase(const iterator& __p) {
    size_t __p_index = __p.index();
    erase(__p_index, 1);
    return iterator(this, __p_index);
  }
  1911 
  // Substring of length __len (default 1) starting at __start.
  // NOTE(review): only __start is range-checked; __start + __len past the
  // end is passed straight to _S_substring -- presumably it clamps, confirm.
  _Self substr(size_t __start, size_t __len = 1) const {
    if (__start > size()) _M_throw_out_of_range();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  }

  // Substring over the iterator range [__start, __end).
  _Self substr(iterator __start, iterator __end) const {
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a mutable iterator.
  _Self substr(iterator __start) const {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }

  _Self substr(const_iterator __start, const_iterator __end) const {
    // This might eventually take advantage of the cache in the
    // iterator.
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a const iterator.
  rope<_CharT,_Alloc> substr(const_iterator __start) {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }
  1936 
  1937 #include <stl/_string_npos.h>
  1938 
  // Find the first occurrence of rope __s at or after __pos.
  // Returns npos on failure (or size() under the old rope semantics).
  size_type find(const _Self& __s, size_type __pos = 0) const {
    if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
      return npos;
# else
      return size();
# endif

    size_type __result_pos;
    const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
  // Single-character find; defined out of line.
  size_type find(_CharT __c, size_type __pos = 0) const;
  1956   size_type find(const _CharT* __s, size_type __pos = 0) const {
  1957     size_type __result_pos;
  1958     const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
  1959                                      __s, __s + _S_char_ptr_len(__s));
  1960     __result_pos = __result.index();
  1961 # ifndef _STLP_OLD_ROPE_SEMANTICS
  1962     if (__result_pos == size()) __result_pos = npos;
  1963 # endif
  1964     return __result_pos;
  1965   }
  1966 
  // Mutable (non-const) iterators over the rope.
  iterator mutable_begin() {
    return(iterator(this, 0));
  }

  iterator mutable_end() {
    return(iterator(this, size()));
  }

  reverse_iterator mutable_rbegin() {
    return reverse_iterator(mutable_end());
  }

  reverse_iterator mutable_rend() {
    return reverse_iterator(mutable_begin());
  }

  // Proxy reference permitting assignment to the character at __pos.
  reference mutable_reference_at(size_type __pos) {
    return reference(this, __pos);
  }
  1986 
# ifdef __STD_STUFF
  // Writable element access via the mutable reference proxy.
  reference operator[] (size_type __pos) {
    return reference(this, __pos);
  }

  // Checked writable access; throws out_of_range for bad positions.
  reference at(size_type __pos) {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }

  // Capacity-related operations are no-ops for ropes.
  void resize(size_type, _CharT) {}
  void resize(size_type) {}
  void reserve(size_type = 0) {}
  size_type capacity() const {
    return max_size();
  }

  // Stuff below this line is dangerous because it's error prone.
  // I would really like to get rid of it.
  // copy function with funny arg ordering.
  size_type copy(_CharT* __buffer, size_type __n,
                 size_type __pos = 0) const {
    return copy(__pos, __n, __buffer);
  }

  iterator end() { return mutable_end(); }

  iterator begin() { return mutable_begin(); }

  reverse_iterator rend() { return mutable_rend(); }

  reverse_iterator rbegin() { return mutable_rbegin(); }

# else
  // Without __STD_STUFF, non-const begin/end still yield const iterators.
  const_iterator end() { return const_end(); }

  const_iterator begin() { return const_begin(); }

  const_reverse_iterator rend() { return const_rend(); }

  const_reverse_iterator rbegin() { return const_rbegin(); }

# endif
  2031 }; //class rope
  2032 
#if !defined (_STLP_STATIC_CONST_INIT_BUG)
#  if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
// Out-of-class definition of rope::npos, needed only for gcc 2.96.
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#  endif
#endif
  2039 
// Random access through a const iterator, via _S_fetch on the tree.
template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }

// Const iterators are equal when they refer to the same position of the
// same tree.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root == __y._M_root);
}

// Ordering considers only the position, not the root.
template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining relational operators, derived from == and < as usual.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Iterator difference and displacement, measured in characters.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
# endif

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
  2102 
// Mutable iterators are equal when they refer to the same position of
// the same rope object.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

// Ordering considers only the position.
template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
// Remaining relational operators, derived from == and < as usual.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Iterator difference and displacement, measured in characters.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
# endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
  2163 
// Concatenation of two ropes; the result shares structure with both
// operands.  Allocators must match.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  __left.append(__right);
  return __left;
}

// Concatenation with a 0-terminated character string.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
  __left.append(__right);
  return __left;
}

// Concatenation with a single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  __left.append(__right);
  return __left;
}
  2209 
// Lexicographic comparison, built on rope::compare.
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}

#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining rope relational operators, derived from == and <.
template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Char-pointer proxies are equal when they designate the same position
// in the same rope.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}

#if !defined (_STLP_USE_NO_IOSTREAMS)
// Stream output; defined out of line.
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
#endif
  2269 
// Convenience typedefs for the common character types.
typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
#endif

// Obtain a writable reference proxy into a rope.
inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }

#if defined (_STLP_HAS_WCHAR_T)
inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }
#endif
  2282 
#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
// swap specialization: constant time, exchanges the tree pointers.
template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
{ __x.swap(__y); }
#else

// Non-template overloads for compilers without partial ordering.
inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
# ifdef _STLP_HAS_WCHAR_T  // dwa 8/21/97
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
# endif

#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
  2295 
  2296 
  2297 // Hash functions should probably be revisited later:
  2298 _STLP_TEMPLATE_NULL struct hash<crope> {
  2299   size_t operator()(const crope& __str) const {
  2300     size_t _p_size = __str.size();
  2301 
  2302     if (0 == _p_size) return 0;
  2303     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2304   }
  2305 };
  2306 
  2307 #if defined (_STLP_HAS_WCHAR_T)  // dwa 8/21/97
  2308 _STLP_TEMPLATE_NULL struct hash<wrope> {
  2309   size_t operator()(const wrope& __str) const {
  2310     size_t _p_size = __str.size();
  2311 
  2312     if (0 == _p_size) return 0;
  2313     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2314   }
  2315 };
  2316 #endif
  2317 
#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// I couldn't get this to work with VC++
// Rotate specialized for rope iterators; defined out of line.
template<class _CharT,class _Alloc>
#  if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
extern
#  endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);

// rotate() overload for crope iterators, forwarding to _Rope_rotate.
inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
{ _Rope_rotate(__first, __middle, __last); }
#endif
  2333 
// Conversion of a character reference proxy to the character value:
// use the cached value when it is valid, otherwise fetch from the tree.
template <class _CharT, class _Alloc>
inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
  if (_M_current_valid) {
    return _M_current;
  } else {
    return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
  }
}
  2342 
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
// Ropes support STLport's move optimization (tree pointer transfer).
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __stlp_movable implemented;
  //Completness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif
  2351 
  2352 _STLP_END_NAMESPACE
  2353 
  2354 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
  2355 #  include <stl/_rope.c>
  2356 #endif
  2357 
  2358 #endif /* _STLP_INTERNAL_ROPE_H */
  2359 
  2360 // Local Variables:
  2361 // mode:C++
  2362 // End: