epoc32/include/stdapis/stlport/stl/_rope.h
author William Roberts <williamr@symbian.org>
Tue, 16 Mar 2010 16:12:26 +0000
branch Symbian2
changeset 2 2fe1408b6811
parent 0 061f57f2323e
permissions -rw-r--r--
Final list of Symbian^2 public API header files
     1 /*
     2  * © Portions copyright (c) 2006-2007 Nokia Corporation.  All rights reserved.
     3  * Copyright (c) 1996,1997
     4  * Silicon Graphics Computer Systems, Inc.
     5  *
     6  * Copyright (c) 1997
     7  * Moscow Center for SPARC Technology
     8  *
     9  * Copyright (c) 1999 
    10  * Boris Fomitchev
    11  *
    12  * This material is provided "as is", with absolutely no warranty expressed
    13  * or implied. Any use is at your own risk.
    14  *
    15  * Permission to use or copy this software for any purpose is hereby granted 
    16  * without fee, provided the above notices are retained on all copies.
    17  * Permission to modify the code and to distribute modified code is granted,
    18  * provided the above notices are retained, and a notice that the code was
    19  * modified is included with the above copyright notice.
    20  *
    21  */
    22 
    23 /* NOTE: This is an internal header file, included by other STL headers.
    24  *   You should not attempt to use it directly.
    25  */
    26 
    27 // rope<_CharT,_Alloc> is a sequence of _CharT.
    28 // Ropes appear to be mutable, but update operations
    29 // really copy enough of the data structure to leave the original
    30 // valid.  Thus ropes can be logically copied by just copying
    31 // a pointer value.
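       //
       // An illustrative sketch (not part of the original header): because
       // updates copy only the nodes they touch, copying a rope is O(1) and
       // the copy keeps its old value after the original is modified.
       //
       //   rope<char> a("hello, ");
       //   rope<char> b = a;     // shares a's tree; no characters are copied
       //   a += "world";         // a now points at a new concatenation node
       //   // b still reads "hello, " -- the original tree was left valid.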
    32 
    33 #ifndef _STLP_INTERNAL_ROPE_H
    34 # define _STLP_INTERNAL_ROPE_H
    35 
    36 # ifndef _STLP_INTERNAL_ALGOBASE_H
    37 #  include <stl/_algobase.h>
    38 # endif
    39 
    40 # ifndef _STLP_IOSFWD
    41 #  include <iosfwd>
    42 # endif
    43 
    44 # ifndef _STLP_INTERNAL_ALLOC_H
    45 #  include <stl/_alloc.h>
    46 # endif
    47 
    48 # ifndef _STLP_INTERNAL_ITERATOR_H
    49 #  include <stl/_iterator.h>
    50 # endif
    51 
    52 # ifndef _STLP_INTERNAL_ALGO_H
    53 #  include <stl/_algo.h>
    54 # endif
    55 
    56 # ifndef _STLP_INTERNAL_FUNCTION_H
    57 #  include <stl/_function.h>
    58 # endif
    59 
    60 # ifndef _STLP_INTERNAL_NUMERIC_H
    61 #  include <stl/_numeric.h>
    62 # endif
    63 
    64 # ifndef _STLP_INTERNAL_HASH_FUN_H
    65 #  include <stl/_hash_fun.h>
    66 # endif
    67 
    68 # ifdef __GC
    69 #   define __GC_CONST const
    70 # else
    71 # include <stl/_threads.h>
    72 #   define __GC_CONST   // constant except for deallocation
    73 # endif
    74 # ifdef _STLP_SGI_THREADS
    75 #    include <mutex.h>
    76 # endif
    77 
    78 #ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM 
    79 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a)) 
    80 #elif defined(__MRC__)||defined(__SC__) 
    81 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0) 
    82 #else 
    83 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0) 
    84 #endif 
    85 
    86 _STLP_BEGIN_NAMESPACE
    87 
    88 // First a lot of forward declarations.  The standard seems to require
    89 // much stricter "declaration before use" than many of the implementations
    90 // that preceded it.
    91 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
    92 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
    93 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
    94 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
    95 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
    96 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
    97 template<class _CharT, class _Alloc> class _Rope_iterator;
    98 template<class _CharT, class _Alloc> class _Rope_const_iterator;
    99 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
   100 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
   101 
   102 // Some helpers, so we can use power on ropes.
   103 // See below for why this isn't local to the implementation.
   104 
   105 // This uses a nonstandard refcount convention.
   106 // The result has refcount 0.
   107 template<class _CharT, class _Alloc>
   108 struct _Rope_Concat_fn
   109   : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
   110   rope<_CharT,_Alloc> > {
   111   rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
   112                                   const rope<_CharT,_Alloc>& __y) {
   113     return __x + __y;
   114   }
   115 };
   116 
   117 template <class _CharT, class _Alloc>
   118 inline
   119 rope<_CharT,_Alloc>
   120 __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
   121 {
   122   return rope<_CharT,_Alloc>();
   123 }
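
       // An illustrative sketch (assuming the SGI `power' extension from
       // <numeric> is available): _Rope_Concat_fn is the monoid operation, so
       // repeating a rope shares structure instead of copying characters.
       //
       //   rope<char> ab("ab");
       //   rope<char> r = power(ab, 4, _Rope_Concat_fn<char, allocator<char> >());
       //   // r == "abababab"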
   124 
   125 // The _S_eos function is used for those functions that
   126 // convert to/from C-like strings to detect the end of the string.
   127 
   128 // The end-of-C-string character.
   129 // This is what the draft standard says it should be.
   130 template <class _CharT>
   131 inline _CharT _S_eos(_CharT*) { return _CharT(); }
   132 
   133 // fbp : some compilers fail to zero-initialize builtins ;(
   134 inline const char _S_eos(const char*) { return 0; }
   135 # ifdef _STLP_HAS_WCHAR_T
   136 inline const wchar_t _S_eos(const wchar_t*) { return 0; }
   137 # endif
   138 
   139 // Test for basic character types.
    140 // For basic character types, leaves have a trailing eos.
   141 template <class _CharT>
   142 inline bool _S_is_basic_char_type(_CharT*) { return false; }
   143 template <class _CharT>
   144 inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
   145 
   146 inline bool _S_is_basic_char_type(char*) { return true; }
   147 inline bool _S_is_one_byte_char_type(char*) { return true; }
   148 # ifdef _STLP_HAS_WCHAR_T
   149 inline bool _S_is_basic_char_type(wchar_t*) { return true; }
   150 # endif
   151 
   152 // Store an eos iff _CharT is a basic character type.
   153 // Do not reference _S_eos if it isn't.
   154 template <class _CharT>
   155 inline void _S_cond_store_eos(_CharT&) {}
   156 
   157 inline void _S_cond_store_eos(char& __c) { __c = 0; }
   158 # ifdef _STLP_HAS_WCHAR_T
   159 inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
   160 # endif
   161 
   162 // char_producers are logically functions that generate a section of
    163 // a string.  These can be converted to ropes.  The resulting rope
   164 // invokes the char_producer on demand.  This allows, for example,
   165 // files to be viewed as ropes without reading the entire file.
   166 template <class _CharT>
   167 class char_producer {
   168 public:
   169   virtual ~char_producer() {};
   170   virtual void operator()(size_t __start_pos, size_t __len, 
   171                           _CharT* __buffer) = 0;
   172   // Buffer should really be an arbitrary output iterator.
   173   // That way we could flatten directly into an ostream, etc.
   174   // This is thoroughly impossible, since iterator types don't
   175   // have runtime descriptions.
   176 };
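
       // An illustrative sketch (not part of the original header): a minimal
       // char_producer that fills its buffer on demand.  A rope built on top
       // of such a producer (rope has a char_producer constructor further
       // down) calls operator() only for the sections actually examined.
       //
       //   struct fill_producer : public char_producer<char> {
       //     char _M_c;
       //     fill_producer(char __c) : _M_c(__c) {}
       //     virtual void operator()(size_t, size_t __len, char* __buf)
       //       { for (size_t __i = 0; __i != __len; ++__i) __buf[__i] = _M_c; }
       //   };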
   177 
   178 // Sequence buffers:
   179 //
   180 // Sequence must provide an append operation that appends an
   181 // array to the sequence.  Sequence buffers are useful only if
   182 // appending an entire array is cheaper than appending element by element.
   183 // This is true for many string representations.
    184 // This should perhaps inherit from ostream<sequence::value_type>
    185 // and be implemented correspondingly, so that they can be used
    186 // for formatted output.  For the sake of portability, we don't do this yet.
   187 //
   188 // For now, sequence buffers behave as output iterators.  But they also
   189 // behave a little like basic_ostringstream<sequence::value_type> and a
   190 // little like containers.
   191 
   192 template<class _Sequence
   193 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   194        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   195 , size_t _Buf_sz = 100
   196 #   if defined(__sgi) && !defined(__GNUC__)
   197 #	 define __TYPEDEF_WORKAROUND
   198 ,class _V = typename _Sequence::value_type
   199 #   endif /* __sgi */
   200 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   201 >
   202 // The 3rd parameter works around a common compiler bug.
   203 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
   204 public:
   205 #       ifndef __TYPEDEF_WORKAROUND
   206   typedef typename _Sequence::value_type value_type;
   207   typedef sequence_buffer<_Sequence
   208 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
   209        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
   210   , _Buf_sz
   211   > _Self;
   212 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   213   > _Self;
   214   enum { _Buf_sz = 100}; 
   215 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
   216   // # endif
   217 #	else /* __TYPEDEF_WORKAROUND */
   218   typedef _V value_type;
   219   typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
   220 #	endif /* __TYPEDEF_WORKAROUND */
   221 protected:
   222   _Sequence* _M_prefix;
   223   value_type _M_buffer[_Buf_sz];
   224   size_t     _M_buf_count;
   225 public:
   226   void flush() {
   227     _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
   228     _M_buf_count = 0;
   229   }
   230   ~sequence_buffer() { flush(); }
   231   sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
   232   sequence_buffer(const _Self& __x) {
   233     _M_prefix = __x._M_prefix;
   234     _M_buf_count = __x._M_buf_count;
   235     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   236   }
   237   sequence_buffer(_Self& __x) {
   238     __x.flush();
   239     _M_prefix = __x._M_prefix;
   240     _M_buf_count = 0;
   241   }
   242   sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
   243   _Self& operator= (_Self& __x) {
   244     __x.flush();
   245     _M_prefix = __x._M_prefix;
   246     _M_buf_count = 0;
   247     return *this;
   248   }
   249   _Self& operator= (const _Self& __x) {
   250     _M_prefix = __x._M_prefix;
   251     _M_buf_count = __x._M_buf_count;
   252     copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
   253     return *this;
   254   }
   255   void push_back(value_type __x)
   256   {
   257     if (_M_buf_count < _Buf_sz) {
   258       _M_buffer[_M_buf_count] = __x;
   259       ++_M_buf_count;
   260     } else {
   261       flush();
   262       _M_buffer[0] = __x;
   263       _M_buf_count = 1;
   264     }
   265   }
   266   void append(value_type* __s, size_t __len)
   267   {
   268     if (__len + _M_buf_count <= _Buf_sz) {
   269       size_t __i = _M_buf_count;
   270       size_t __j = 0;
   271       for (; __j < __len; __i++, __j++) {
   272         _M_buffer[__i] = __s[__j];
   273       }
   274       _M_buf_count += __len;
   275     } else if (0 == _M_buf_count) {
   276       _M_prefix->append(__s, __s + __len);
   277     } else {
   278       flush();
   279       append(__s, __len);
   280     }
   281   }
   282   _Self& write(value_type* __s, size_t __len)
   283   {
   284     append(__s, __len);
   285     return *this;
   286   }
   287   _Self& put(value_type __x)
   288   {
   289     push_back(__x);
   290     return *this;
   291   }
   292   _Self& operator=(const value_type& __rhs)
   293   {
   294     push_back(__rhs);
   295     return *this;
   296   }
   297   _Self& operator*() { return *this; }
   298   _Self& operator++() { return *this; }
   299   _Self& operator++(int) { return *this; }
   300 };
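
       // An illustrative sketch (not part of the original header): used as an
       // output iterator, a sequence_buffer batches single-character writes
       // and appends them to the underlying sequence a block at a time.
       //
       //   rope<char> r;
       //   sequence_buffer<rope<char> > buf(r);
       //   for (int i = 0; i != 100; ++i)
       //     *buf++ = 'x';     // buffered; appended to r in _Buf_sz chunks
       //   buf.flush();        // push any remainder into r before reading it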
   301 
   302 // The following should be treated as private, at least for now.
   303 template<class _CharT>
   304 class _Rope_char_consumer {
   305 public:
   306   // If we had member templates, these should not be virtual.
   307   // For now we need to use run-time parametrization where
    308 // compile-time would do.  Hence this should all be private
   309   // for now.
   310   // The symmetry with char_producer is accidental and temporary.
   311   virtual ~_Rope_char_consumer() {};
   312   virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
   313 };
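
       // An illustrative sketch (not part of the original header): a consumer
       // that just counts characters.  rope::apply_to_pieces (declared below)
       // hands each contiguous piece of the rope to such a consumer in turn;
       // the bool result tells the traversal whether to keep going.
       //
       //   struct count_consumer : public _Rope_char_consumer<char> {
       //     size_t _M_count;
       //     count_consumer() : _M_count(0) {}
       //     virtual bool operator()(const char*, size_t __len)
       //       { _M_count += __len; return true; }
       //   };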
   314 
   315 //
   316 // What follows should really be local to rope.  Unfortunately,
   317 // that doesn't work, since it makes it impossible to define generic
   318 // equality on rope iterators.  According to the draft standard, the
   319 // template parameters for such an equality operator cannot be inferred
    320 // from the occurrence of a member class as a parameter.
    321 // (SGI compilers in fact allow this, but the result wouldn't be
   322 // portable.)
   323 // Similarly, some of the static member functions are member functions
   324 // only to avoid polluting the global namespace, and to circumvent
   325 // restrictions on type inference for template functions.
   326 //
   327 
   328 //
   329 // The internal data structure for representing a rope.  This is
   330 // private to the implementation.  A rope is really just a pointer
   331 // to one of these.
   332 //
   333 // A few basic functions for manipulating this data structure
   334 // are members of _RopeRep.  Most of the more complex algorithms
   335 // are implemented as rope members.
   336 //
   337 // Some of the static member functions of _RopeRep have identically
   338 // named functions in rope that simply invoke the _RopeRep versions.
   339 //
    340 // A macro to introduce various allocation and deallocation functions.
   341 // These need to be defined differently depending on whether or not
   342 // we are using standard conforming allocators, and whether the allocator
   343 // instances have real state.  Thus this macro is invoked repeatedly
   344 // with different definitions of __ROPE_DEFINE_ALLOC.
   345 
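       // An illustrative sketch of the representation: concatenating two long
       // ropes allocates a single concatenation node instead of copying any
       // characters (short leaves are an exception; see _S_copy_max below).
       //
       //   rope<char> c = a + b;
       //
       //            concat node          c's tree is one new node whose left
       //           /           \         and right children are a's and b's
       //       a's tree      b's tree    trees, with their refcounts bumped.
       //
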
   346 #if defined (_STLP_MEMBER_TEMPLATE_CLASSES)
   347 # define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) \
   348         typedef typename \
   349           _Alloc_traits<_Tp,_Alloc>::allocator_type __name##Allocator;
   350 
   351 #define __ROPE_DEFINE_ALLOCS(__a, _M_proxy) \
   352         __ROPE_DEFINE_ALLOC(_CharT,_Data, _M_proxy) /* character data */ \
   353         typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
   354         __ROPE_DEFINE_ALLOC(__C,_C, _M_proxy) \
   355         typedef _Rope_RopeLeaf<_CharT,__a> __L; \
   356         __ROPE_DEFINE_ALLOC(__L,_L, _M_proxy) \
   357         typedef _Rope_RopeFunction<_CharT,__a> __F; \
   358         __ROPE_DEFINE_ALLOC(__F,_F, _M_proxy) \
   359         typedef _Rope_RopeSubstring<_CharT,__a> __S; \
   360         __ROPE_DEFINE_ALLOC(__S,_S,_M_proxy)
   361 #else
   362 #define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) 
   363 #define __ROPE_DEFINE_ALLOCS(__a, _M_proxy)
   364 #endif
   365 
   366 
   367 template<class _CharT, class _Alloc>
   368 struct _Rope_RopeRep
   369 # ifndef __GC
   370   : public _Refcount_Base
   371 # endif
   372 {
   373   typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
   374 public:
   375 #  define __ROPE_MAX_DEPTH  45
   376 #  define __ROPE_DEPTH_SIZE 46
   377   enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
   378   enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
   379   // Apparently needed by VC++
   380   // The data fields of leaves are allocated with some
    381 // extra space, to accommodate future growth and for basic
   382   // character types, to hold a trailing eos character.
   383   enum { _S_alloc_granularity = 8 };
   384 
   385   
   386   _Tag _M_tag:8;
   387   bool _M_is_balanced:8;
   388 
   389   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   390   typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type
   391   allocator_type;
   392   
   393   allocator_type get_allocator() const { return allocator_type(_M_size);  }
   394 
   395   unsigned char _M_depth;
   396   __GC_CONST _CharT* _M_c_string;
   397   _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
   398 
   399 # ifdef _STLP_NO_ARROW_OPERATOR
   400   _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
   401 # endif
   402 
   403   /* Flattened version of string, if needed.  */
   404   /* typically 0.                             */
   405   /* If it's not 0, then the memory is owned  */
   406   /* by this node.                            */
   407   /* In the case of a leaf, this may point to */
   408   /* the same memory as the data field.       */
   409   _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t _p_size,
   410                 allocator_type __a) :
   411 #         ifndef __GC
   412     _Refcount_Base(1),
   413 #	  endif
   414     _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
   415   { }
   416 #   ifdef __GC
   417   void _M_incr () {}
   418 #   endif
   419 
   420   // fbp : moved from RopeLeaf
   421   static size_t _S_rounded_up_size(size_t __n) {
   422     size_t __size_with_eos;
   423     
   424     if (_S_is_basic_char_type((_CharT*)0)) {
   425       __size_with_eos = __n + 1;
   426     } else {
   427       __size_with_eos = __n;
   428     }
   429 #       ifdef __GC
   430     return __size_with_eos;
   431 #       else
   432     // Allow slop for in-place expansion.
   433     return (__size_with_eos + _S_alloc_granularity-1)
   434       &~ (_S_alloc_granularity-1);
   435 #       endif
   436   }
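
       // For example, with char (a basic character type, so one extra slot is
       // reserved for the eos) and _S_alloc_granularity == 8, in the non-__GC
       // case:
       //   _S_rounded_up_size(5)  == 8    (5 + 1, rounded up to 8)
       //   _S_rounded_up_size(8)  == 16   (8 + 1, rounded up to 16)
       //   _S_rounded_up_size(15) == 16   (15 + 1 is already a multiple of 8)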
   437 
   438   static void _S_free_string(__GC_CONST _CharT* __s, size_t __len,
   439                              allocator_type __a) {
   440 
   441     if (!_S_is_basic_char_type((_CharT*)0)) {
   442       _STLP_STD::_Destroy(__s, __s + __len);
   443     }
   444     //  This has to be a static member, so this gets a bit messy
   445 #   ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
   446     __a.deallocate(__s, _S_rounded_up_size(__len));		//*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
   447 #   else
   448     __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
   449 #   endif
   450   }
   451   
   452   // Deallocate data section of a leaf.
   453   // This shouldn't be a member function.
    454 // But it's hard to do anything else at the
   455   // moment, because it's templatized w.r.t.
   456   // an allocator.
   457   // Does nothing if __GC is defined.
   458 #   ifndef __GC
   459   void _M_free_c_string();
   460   void _M_free_tree();
   461   // Deallocate t. Assumes t is not 0.
   462   void _M_unref_nonnil()
   463   {
   464     _M_decr(); if (!_M_ref_count) _M_free_tree();
   465   }
   466   void _M_ref_nonnil()
   467   {
   468     _M_incr();
   469   }
   470   static void _S_unref(_Self* __t)
   471   {
   472     if (0 != __t) {
   473       __t->_M_unref_nonnil();
   474     }
   475   }
   476   static void _S_ref(_Self* __t)
   477   {
   478     if (0 != __t) __t->_M_incr();
   479   }
   480   static void _S_free_if_unref(_Self* __t)
   481   {
   482     if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
   483   }
   484 #   else /* __GC */
   485   void _M_unref_nonnil() {}
   486   void _M_ref_nonnil() {}
   487   static void _S_unref(_Self*) {}
   488   static void _S_ref(_Self*) {}
   489   static void _S_free_if_unref(_Self*) {}
   490 #   endif
   491 
   492   __ROPE_DEFINE_ALLOCS(_Alloc, _M_size)
   493     };
   494 
   495 template<class _CharT, class _Alloc>
   496 struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
   497 public:
   498   __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
   499                                 /* The allocated size is         */
   500                                 /* _S_rounded_up_size(size), except */
   501                                 /* in the GC case, in which it   */
   502                                 /* doesn't matter.               */
   503   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   504   typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
   505   _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t _p_size, allocator_type __a)
   506     : _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_leaf, 0, true, _p_size, __a), 
   507     _M_data(__d)
   508   {
   509     _STLP_ASSERT(_p_size > 0)
   510     if (_S_is_basic_char_type((_CharT *)0)) {
   511       // already eos terminated.
   512       this->_M_c_string = __d;
   513     }
   514   }
   515 
   516 # ifdef _STLP_NO_ARROW_OPERATOR
   517   _Rope_RopeLeaf() {}
   518   _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
   519 # endif
   520   
    521   // The constructor assumes that __d has been allocated with
   522   // the proper allocator and the properly padded size.
   523   // In contrast, the destructor deallocates the data:
   524 # ifndef __GC
   525   ~_Rope_RopeLeaf() {
   526     if (_M_data != this->_M_c_string) {
   527       this->_M_free_c_string();
   528     }
   529     _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
   530   }
   531 # endif
   532 };
   533 
   534 template<class _CharT, class _Alloc>
   535 struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
   536 public:
   537   _Rope_RopeRep<_CharT,_Alloc>* _M_left;
   538   _Rope_RopeRep<_CharT,_Alloc>* _M_right;
   539   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   540   typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
   541   _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
   542                           _Rope_RopeRep<_CharT,_Alloc>* __r,
   543                           allocator_type __a)
   544     :   _Rope_RopeRep<_CharT,_Alloc>(
   545                                      _Rope_RopeRep<_CharT,_Alloc>::_S_concat, 
   546 				     (max)(__l->_M_depth, __r->_M_depth) + 1, false,
   547                                      __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
   548   {}
   549 # ifdef _STLP_NO_ARROW_OPERATOR
   550   _Rope_RopeConcatenation() {}
   551   _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
   552 # endif
   553 
   554 # ifndef __GC
   555   ~_Rope_RopeConcatenation() {
   556     this->_M_free_c_string();
   557     _M_left->_M_unref_nonnil();
   558     _M_right->_M_unref_nonnil();
   559   }
   560 # endif
   561 };
   562 
   563 template<class _CharT, class _Alloc>
   564 struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
   565 public:
   566   char_producer<_CharT>* _M_fn;
   567 #   ifndef __GC
   568   bool _M_delete_when_done; // Char_producer is owned by the
   569                                 // rope and should be explicitly
   570                                 // deleted when the rope becomes
   571                                 // inaccessible.
   572 #   else
   573   // In the GC case, we either register the rope for
   574   // finalization, or not.  Thus the field is unnecessary;
   575   // the information is stored in the collector data structures.
   576   // We do need a finalization procedure to be invoked by the
   577   // collector.
   578   static void _S_fn_finalization_proc(void * __tree, void *) {
   579     delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
   580   }
   581 #   endif
   582   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   583   typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
   584 # ifdef _STLP_NO_ARROW_OPERATOR
   585   _Rope_RopeFunction() {}
   586   _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
   587 # endif
   588 
   589   _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
   590                      bool __d, allocator_type __a)
   591     :
   592     _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_function, 0, true, _p_size, __a),
   593     _M_fn(__f)
   594 #       ifndef __GC
   595     , _M_delete_when_done(__d)
   596 #       endif
   597   {
   598     _STLP_ASSERT(_p_size > 0)
   599 #       ifdef __GC
   600     if (__d) {
   601       GC_REGISTER_FINALIZER(
   602                             this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
   603     }
   604 #       endif
   605   }
   606 # ifndef __GC
   607   ~_Rope_RopeFunction() {
   608     this->_M_free_c_string();
   609     if (_M_delete_when_done) {
   610       delete _M_fn;
   611     }
   612   }
   613 # endif
   614 };
   615 // Substring results are usually represented using just
   616 // concatenation nodes.  But in the case of very long flat ropes
    617 // or ropes with a functional representation, that isn't practical.
    618 // In that case, we represent the result as a special case of
    619 // RopeFunction, whose char_producer points back to the rope itself.
    620 // In all cases except repeated substring operations and
    621 // deallocation, we treat the result as a RopeFunction.
   622 template<class _CharT, class _Alloc>
   623 # if  ( defined (__IBMCPP__) && (__IBMCPP__ == 500) )  // JFA 10-Aug-2000 for some reason xlC cares about the order
   624 struct _Rope_RopeSubstring : public char_producer<_CharT> , public _Rope_RopeFunction<_CharT,_Alloc>
   625 # else
   626 struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
   627                              public char_producer<_CharT>
   628 # endif
   629 {
   630 public:
   631   // XXX this whole class should be rewritten.
   632   typedef _Rope_RopeRep<_CharT,_Alloc> _Base;
   633   _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
   634   size_t _M_start;
   635   virtual void operator()(size_t __start_pos, size_t __req_len,
   636                           _CharT* __buffer) {
   637     switch(_M_base->_M_tag) {
   638     case _Base::_S_function:
   639     case _Base::_S_substringfn:
   640       {
   641         char_producer<_CharT>* __fn =
   642           ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
   643         _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
   644         _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
   645         (*__fn)(__start_pos + _M_start, __req_len, __buffer);
   646       }
   647       break;
   648     case _Base::_S_leaf:
   649       {
   650         __GC_CONST _CharT* __s =
   651           ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
   652         uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
   653                              __buffer);
   654       }
   655       break;
   656     default:
   657       _STLP_ASSERT(false)
   658         ;
   659     }
   660   }
   661 
   662   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   663   typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
   664 
   665   _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
   666                       size_t __l, allocator_type __a)
   667     : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
   668 	_M_base(__b),
   669     _M_start(__s)
   670        
   671   {
   672     _STLP_ASSERT(__l > 0)
   673     _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
   674 #       ifndef __GC
   675     _M_base->_M_ref_nonnil();
   676 #       endif
   677     this->_M_tag = _Base::_S_substringfn;
   678   }
   679   virtual ~_Rope_RopeSubstring()
   680   { 
   681 #       ifndef __GC
   682     _M_base->_M_unref_nonnil();
   683 #       endif
   684   }
   685 };
   686 
   687 // Self-destructing pointers to Rope_rep.
   688 // These are not conventional smart pointers.  Their
   689 // only purpose in life is to ensure that unref is called
   690 // on the pointer either at normal exit or if an exception
   691 // is raised.  It is the caller's responsibility to
   692 // adjust reference counts when these pointers are initialized
   693 // or assigned to.  (This convention significantly reduces
   694 // the number of potentially expensive reference count
   695 // updates.)
   696 #ifndef __GC
   697 template<class _CharT, class _Alloc>
   698 struct _Rope_self_destruct_ptr {
   699   _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
   700   ~_Rope_self_destruct_ptr() 
   701   { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
   702 #   ifdef _STLP_USE_EXCEPTIONS
   703   _Rope_self_destruct_ptr() : _M_ptr(0) {};
   704 #   else
   705   _Rope_self_destruct_ptr() {};
   706 #   endif
   707   _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
   708   _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
   709   _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
   710   operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
   711   _Rope_self_destruct_ptr<_CharT, _Alloc>& 
   712   operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
   713   { _M_ptr = __x; return *this; }
   714 };
   715 #endif
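
       // An illustrative sketch of the intended use (the pattern the rope
       // members follow; __root, __i and __j are placeholders):
       //
       //   {
       //     _Rope_self_destruct_ptr<char, allocator<char> >
       //         __p(_S_substring(__root, __i, __j));  // result already counted
       //     // ... work with *__p or __p-> ...
       //   }  // __p's destructor calls _S_unref here, even on an exception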
   716 
   717 // Dereferencing a nonconst iterator has to return something
   718 // that behaves almost like a reference.  It's not possible to
   719 // return an actual reference since assignment requires extra
   720 // work.  And we would get into the same problems as with the
   721 // CD2 version of basic_string.
   722 template<class _CharT, class _Alloc>
   723 class _Rope_char_ref_proxy {
   724   typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
   725   friend class rope<_CharT,_Alloc>;
   726   friend class _Rope_iterator<_CharT,_Alloc>;
   727   friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
   728 #   ifdef __GC
   729   typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
   730 #   else
   731   typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
   732 #   endif
   733   typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
   734   typedef rope<_CharT,_Alloc> _My_rope;
   735   size_t _M_pos;
   736   _CharT _M_current;
   737   bool _M_current_valid;
   738   _My_rope* _M_root;     // The whole rope.
   739 public:
   740   _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
   741     _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
   742   _Rope_char_ref_proxy(const _Self& __x) :
   743     _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
   744   // Don't preserve cache if the reference can outlive the
   745   // expression.  We claim that's not possible without calling
    746   // a copy constructor or generating a reference to a proxy
   747   // reference.  We declare the latter to have undefined semantics.
   748   _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
   749                        _CharT __c) :
   750     _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
   751   inline operator _CharT () const;
   752   _Self& operator= (_CharT __c);
   753   _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
   754   _Self& operator= (const _Self& __c) {
   755     return operator=((_CharT)__c); 
   756   }
   757 };
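
       // An illustrative sketch (not part of the original header): the proxy
       // is what a non-const rope iterator yields, so reads and writes both
       // go through the owning rope.
       //
       //   rope<char> r("abc");
       //   rope<char>::iterator i = r.mutable_begin();  // mutable_begin is declared in rope, further down
       //   *i = 'x';      // assignment goes through the proxy; r becomes "xbc"
       //   char c = *i;   // the proxy converts back to a plain char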
   758 
   759 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
   760 template<class _CharT, class __Alloc>
   761 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
   762                  _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
   763   _CharT __tmp = __a;
   764   __a = __b;
   765   __b = __tmp;
   766 }
   767 #else
   768 // There is no really acceptable way to handle this.  The default
   769 // definition of swap doesn't work for proxy references.
   770 // It can't really be made to work, even with ugly hacks, since
   771 // the only unusual operation it uses is the copy constructor, which
   772 // is needed for other purposes.  We provide a macro for
   773 // full specializations, and instantiate the most common case.
   774 # define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
   775     inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
   776                      _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
   777         _CharT __tmp = __a; \
   778         __a = __b; \
   779         __b = __tmp; \
   780     }
   781 
   782 _ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
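
       // An illustrative sketch: code that uses another character type with
       // the default allocator can request the same specialization itself,
       // e.g. for wide-character ropes:
       //   _ROPE_SWAP_SPECIALIZATION(wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t))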
   783 
   784 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
   785 
   786   template<class _CharT, class _Alloc>
   787 class _Rope_char_ptr_proxy {
   788   // XXX this class should be rewritten.
   789 public:
   790   typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
   791   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
   792   size_t _M_pos;
   793   rope<_CharT,_Alloc>* _M_root;     // The whole rope.
   794 
   795   _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x) 
   796     : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
   797   _Rope_char_ptr_proxy(const _Self& __x)
   798     : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
   799   _Rope_char_ptr_proxy() {}
   800   _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
   801     _STLP_ASSERT(0 == __x)
   802   }
   803   _Self& 
   804   operator= (const _Self& __x) {
   805     _M_pos = __x._M_pos;
   806     _M_root = __x._M_root;
   807     return *this;
   808   }
   809 
   810   _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
   811     return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
   812   }
   813 };
   814 
   815 
   816 // Rope iterators:
   817 // Unlike in the C version, we cache only part of the stack
   818 // for rope iterators, since they must be efficiently copyable.
   819 // When we run out of cache, we have to reconstruct the iterator
   820 // value.
   821 // Pointers from iterators are not included in reference counts.
   822 // Iterators are assumed to be thread private.  Ropes can
   823 // be shared.
   824 
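       // An illustrative sketch (not part of the original header): iteration
       // looks like any random access iterator, but a dereference may have to
       // walk the tree again once the small cached window into the current
       // leaf is exhausted.
       //
       //   const rope<char> r("some text");
       //   for (rope<char>::const_iterator i = r.begin(); i != r.end(); ++i)
       //     __process(*i);   // __process is a hypothetical callback
       //
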
   825 template<class _CharT, class _Alloc>
   826 class _Rope_iterator_base
   827 /*   : public random_access_iterator<_CharT, ptrdiff_t>  */
   828 {
   829   friend class rope<_CharT,_Alloc>;
   830   typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
   831 public:
   832   typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    833   // Borland doesn't want this to be protected.
   834   //  protected:
   835   enum { _S_path_cache_len = 4 }; // Must be <= 9.
   836   enum { _S_iterator_buf_len = 15 };
   837   size_t _M_current_pos;
   838   _RopeRep* _M_root;     // The whole rope.
   839   size_t _M_leaf_pos;    // Starting position for current leaf
   840   __GC_CONST _CharT* _M_buf_start;
   841   // Buffer possibly
   842   // containing current char.
   843   __GC_CONST _CharT* _M_buf_ptr;
   844   // Pointer to current char in buffer.
   845   // != 0 ==> buffer valid.
   846   __GC_CONST _CharT* _M_buf_end;
    847   // One past last valid char in buffer.
   848   // What follows is the path cache.  We go out of our
   849   // way to make this compact.
   850   // Path_end contains the bottom section of the path from
   851   // the root to the current leaf.
   852   const _RopeRep* _M_path_end[_S_path_cache_len];
    853   int _M_leaf_index;     // Last valid position in path_end;
   854   // _M_path_end[0] ... _M_path_end[leaf_index-1]
   855   // point to concatenation nodes.
   856   unsigned char _M_path_directions;
    857   // (path_directions >> i) & 1 is 1
    858   // iff we got from _M_path_end[leaf_index - i - 1]
    859   // to _M_path_end[leaf_index - i] by going to the
    860   // right.  Assumes path_cache_len <= 9.
   861   _CharT _M_tmp_buf[_S_iterator_buf_len];
   862   // Short buffer for surrounding chars.
   863   // This is useful primarily for 
   864   // RopeFunctions.  We put the buffer
   865   // here to avoid locking in the
   866   // multithreaded case.
   867   // The cached path is generally assumed to be valid
   868   // only if the buffer is valid.
   869   static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
   870   // Set buffer contents given
   871   // path cache.
   872   static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
   873   // Set buffer contents and
   874   // path cache.
   875   static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
   876   // As above, but assumes path
   877   // cache is valid for previous posn.
   878   _Rope_iterator_base() {}
   879   _Rope_iterator_base(_RopeRep* __root, size_t __pos)
   880     : _M_current_pos(__pos),_M_root(__root),  _M_buf_ptr(0) {}
   881   void _M_incr(size_t __n);
   882   void _M_decr(size_t __n);
   883 public:
   884   size_t index() const { return _M_current_pos; }
   885   _Rope_iterator_base(const _Self& __x) {
   886     if (0 != __x._M_buf_ptr) {
   887       *this = __x;
   888     } else {
   889       _M_current_pos = __x._M_current_pos;
   890       _M_root = __x._M_root;
   891       _M_buf_ptr = 0;
   892     }
   893   }
   894 };
   895 
   896 template<class _CharT, class _Alloc> class _Rope_iterator;
   897 
   898 template<class _CharT, class _Alloc>
   899 class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
   900   friend class rope<_CharT,_Alloc>;
   901   typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
   902   typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
   903   //  protected:
   904 public:
   905 #   ifndef _STLP_HAS_NO_NAMESPACES
   906   typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
   907   // The one from the base class may not be directly visible.
   908 #   endif
   909   _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
   910     _Rope_iterator_base<_CharT,_Alloc>(
   911                                        __CONST_CAST(_RopeRep*,__root), __pos)
   912     // Only nonconst iterators modify root ref count
   913   {}
   914 public:
   915   typedef _CharT reference;   // Really a value.  Returning a reference
    916                                 // would be a mess, since it would have
   917                                 // to be included in refcount.
   918   typedef const _CharT* pointer;
   919   typedef _CharT value_type;
   920   typedef ptrdiff_t difference_type;
   921   typedef random_access_iterator_tag iterator_category;
   922 
   923 public:
   924   _Rope_const_iterator() {};
   925   _Rope_const_iterator(const _Self& __x) :
   926     _Rope_iterator_base<_CharT,_Alloc>(__x) { }
   927   _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x): 
   928     _Rope_iterator_base<_CharT,_Alloc>(__x) {}
   929   _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
   930     _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
   931   _Self& operator= (const _Self& __x) {
   932     if (0 != __x._M_buf_ptr) {
   933       *(__STATIC_CAST(_Base*,this)) = __x;
   934     } else {
   935       this->_M_current_pos = __x._M_current_pos;
   936       this->_M_root = __x._M_root;
   937       this->_M_buf_ptr = 0;
   938     }
   939     return(*this);
   940   }
   941   reference operator*() {
   942     if (0 == this->_M_buf_ptr) _S_setcache(*this);
   943     return *(this->_M_buf_ptr);
   944   }
   945   _Self& operator++() {
   946     __GC_CONST _CharT* __next;
   947     if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
   948       this->_M_buf_ptr = __next;
   949       ++this->_M_current_pos;
   950     } else {
   951       this->_M_incr(1);
   952     }
   953     return *this;
   954   }
   955   _Self& operator+=(ptrdiff_t __n) {
   956     if (__n >= 0) {
   957       this->_M_incr(__n);
   958     } else {
   959       this->_M_decr(-__n);
   960     }
   961     return *this;
   962   }
   963   _Self& operator--() {
   964     this->_M_decr(1);
   965     return *this;
   966   }
   967   _Self& operator-=(ptrdiff_t __n) {
   968     if (__n >= 0) {
   969       this->_M_decr(__n);
   970     } else {
   971       this->_M_incr(-__n);
   972     }
   973     return *this;
   974   }
   975   _Self operator++(int) {
   976     size_t __old_pos = this->_M_current_pos;
   977     this->_M_incr(1);
   978     return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
   979     // This makes a subsequent dereference expensive.
   980     // Perhaps we should instead copy the iterator
   981     // if it has a valid cache?
   982   }
   983   _Self operator--(int) {
   984     size_t __old_pos = this->_M_current_pos;
   985     this->_M_decr(1);
   986     return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
   987   }
   988   inline reference operator[](size_t __n);
   989 };
   990 
   991 template<class _CharT, class _Alloc>
   992 class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
   993   friend class rope<_CharT,_Alloc>;
   994   typedef _Rope_iterator<_CharT, _Alloc> _Self;
   995   typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
   996   typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
   997   //  protected:
   998 public:
   999   rope<_CharT,_Alloc>* _M_root_rope;
  1000   // root is treated as a cached version of this,
  1001   // and is used to detect changes to the underlying
  1002   // rope.
  1003   // Root is included in the reference count.
  1004   // This is necessary so that we can detect changes reliably.
  1005   // Unfortunately, it requires careful bookkeeping for the
  1006   // nonGC case.
  1007   _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
  1008   
  1009   void _M_check();
  1010 public:
  1011   typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  1012   typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  1013   typedef _CharT value_type;
  1014   typedef ptrdiff_t difference_type;
  1015   typedef random_access_iterator_tag iterator_category;
  1016 public:
  1017   ~_Rope_iterator() 		//*TY 5/6/00 - added dtor to balance reference count
  1018   {
  1019     _RopeRep::_S_unref(this->_M_root);
  1020   }
  1021   
  1022   rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  1023   _Rope_iterator() {
  1024     this->_M_root = 0;  // Needed for reference counting.
  1025   };
  1026   _Rope_iterator(const  _Self& __x) :
  1027     _Rope_iterator_base<_CharT,_Alloc>(__x) {
  1028     _M_root_rope = __x._M_root_rope;
  1029     _RopeRep::_S_ref(this->_M_root);
  1030   }
  1031   _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  1032   _Self& operator= (const  _Self& __x) {
  1033     _RopeRep* __old = this->_M_root;
  1034     
  1035     _RopeRep::_S_ref(__x._M_root);
  1036     if (0 != __x._M_buf_ptr) {
  1037       _M_root_rope = __x._M_root_rope;
  1038       *(__STATIC_CAST(_Base*,this)) = __x;
  1039     } else {
  1040       this->_M_current_pos = __x._M_current_pos;
  1041       this->_M_root = __x._M_root;
  1042       _M_root_rope = __x._M_root_rope;
  1043       this->_M_buf_ptr = 0;
  1044     }
  1045     _RopeRep::_S_unref(__old);
  1046     return(*this);
  1047   }
  1048   reference operator*() {
  1049     _M_check();
  1050     if (0 == this->_M_buf_ptr) {
  1051       return _Rope_char_ref_proxy<_CharT,_Alloc>(
  1052                                                  _M_root_rope, this->_M_current_pos);
  1053     } else {
  1054       return _Rope_char_ref_proxy<_CharT,_Alloc>(
  1055                                                  _M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
  1056     }
  1057   }
  1058   _Self& operator++() {
  1059     this->_M_incr(1);
  1060     return *this;
  1061   }
  1062   _Self& operator+=(ptrdiff_t __n) {
  1063     if (__n >= 0) {
  1064       this->_M_incr(__n);
  1065     } else {
  1066       this->_M_decr(-__n);
  1067     }
  1068     return *this;
  1069   }
  1070   _Self& operator--() {
  1071     this->_M_decr(1);
  1072     return *this;
  1073   }
  1074   _Self& operator-=(ptrdiff_t __n) {
  1075     if (__n >= 0) {
  1076       this->_M_decr(__n);
  1077     } else {
  1078       this->_M_incr(-__n);
  1079     }
  1080     return *this;
  1081   }
  1082   _Self operator++(int) {
  1083     size_t __old_pos = this->_M_current_pos;
  1084     this->_M_incr(1);
  1085     return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
  1086   }
  1087   _Self operator--(int) {
  1088     size_t __old_pos = this->_M_current_pos;
  1089     this->_M_decr(1);
  1090     return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
  1091   }
  1092   reference operator[](ptrdiff_t __n) {
  1093     return _Rope_char_ref_proxy<_CharT,_Alloc>(
  1094                                                _M_root_rope, this->_M_current_pos + __n);
  1095   }
  1096 };
  1097 
  1098 # ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
  1099 template <class _CharT, class _Alloc>
  1100 inline random_access_iterator_tag
  1101 iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
  1102 template <class _CharT, class _Alloc>
  1103 inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
  1104 template <class _CharT, class _Alloc>
  1105 inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
  1106 template <class _CharT, class _Alloc>
  1107 inline random_access_iterator_tag
  1108 iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
  1109 template <class _CharT, class _Alloc>
  1110 inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
  1111 template <class _CharT, class _Alloc>
  1112 inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
  1113 #endif
  1114 
  1115 template <class _CharT, class _Alloc>
  1116 class rope {
  1117   typedef rope<_CharT,_Alloc> _Self;
  1118 public:
  1119   typedef _CharT value_type;
  1120   typedef ptrdiff_t difference_type;
  1121   typedef size_t size_type;
  1122   typedef _CharT const_reference;
  1123   typedef const _CharT* const_pointer;
  1124   typedef _Rope_iterator<_CharT,_Alloc> iterator;
  1125   typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  1126   typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  1127   typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
  1128   
  1129   friend class _Rope_iterator<_CharT,_Alloc>;
  1130   friend class _Rope_const_iterator<_CharT,_Alloc>;
  1131   friend struct _Rope_RopeRep<_CharT,_Alloc>;
  1132   friend class _Rope_iterator_base<_CharT,_Alloc>;
  1133   friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  1134   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  1135   friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
  1136 
  1137   _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
  1138   
  1139 protected:
  1140   typedef __GC_CONST _CharT* _Cstrptr;
  1141   
  1142   static _CharT _S_empty_c_str[1];
  1143   
  1144   static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
  1145   enum { _S_copy_max = 23 };
  1146   // For strings shorter than _S_copy_max, we copy to
  1147   // concatenate.
  1148   
  1149 public:
  1150   typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  1151   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  1152   typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type  allocator_type;
  1153   allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
  1154 public:
  1155   // The only data member of a rope:
  1156   _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
  1157 
  1158   typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  1159   typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  1160   typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  1161   typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
  1162 
  1163 
  1164 
  1165   // Retrieve a character at the indicated position.
  1166   static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
  1167 
  1168 #       ifndef __GC
  1169   // Obtain a pointer to the character at the indicated position.
  1170   // The pointer can be used to change the character.
  1171   // If such a pointer cannot be produced, as is frequently the
  1172   // case, 0 is returned instead.
  1173   // (Returns nonzero only if all nodes in the path have a refcount
  1174   // of 1.)
  1175   static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
  1176 #       endif
  1177 
  1178   static bool _S_apply_to_pieces(
  1179                                 // should be template parameter
  1180                                  _Rope_char_consumer<_CharT>& __c,
  1181                                  const _RopeRep* __r,
  1182                                  size_t __begin, size_t __end);
  1183                                 // begin and end are assumed to be in range.
  1184 
  1185 #       ifndef __GC
  1186   static void _S_unref(_RopeRep* __t)
  1187   {
  1188     _RopeRep::_S_unref(__t);
  1189   }
  1190   static void _S_ref(_RopeRep* __t)
  1191   {
  1192     _RopeRep::_S_ref(__t);
  1193   }
  1194 #       else /* __GC */
  1195   static void _S_unref(_RopeRep*) {}
  1196   static void _S_ref(_RopeRep*) {}
  1197 #       endif
  1198 
  1199 
  1200 #       ifdef __GC
  1201   typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
  1202 #       else
  1203   typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  1204 #       endif
  1205 
   1206   // Result is counted in refcount.
  1207   static _RopeRep* _S_substring(_RopeRep* __base,
  1208                                 size_t __start, size_t __endp1);
  1209 
  1210   static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
  1211                                        const _CharT* __iter, size_t __slen);
  1212   // Concatenate rope and char ptr, copying __s.
  1213   // Should really take an arbitrary iterator.
  1214   // Result is counted in refcount.
  1215   static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
  1216                                              const _CharT* __iter, size_t __slen)
  1217     // As above, but one reference to __r is about to be
  1218     // destroyed.  Thus the pieces may be recycled if all
   1219     // relevant reference counts are 1.
  1220 #           ifdef __GC
  1221     // We can't really do anything since refcounts are unavailable.
  1222   { return _S_concat_char_iter(__r, __iter, __slen); }
  1223 #           else
  1224   ;
  1225 #           endif
  1226 
  1227   static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
   1228   // General concatenation on _RopeRep.  Result
  1229   // has refcount of 1.  Adjusts argument refcounts.
  1230 
  1231 public:
  1232   void apply_to_pieces( size_t __begin, size_t __end,
  1233                         _Rope_char_consumer<_CharT>& __c) const {
  1234     _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end);
  1235   }
  1236 
  1237 
  1238 protected:
  1239 
  1240   static size_t _S_rounded_up_size(size_t __n) {
  1241     return _RopeRep::_S_rounded_up_size(__n);
  1242   }
  1243 
  1244   static size_t _S_allocated_capacity(size_t __n) {
  1245     if (_S_is_basic_char_type((_CharT*)0)) {
  1246       return _S_rounded_up_size(__n) - 1;
  1247     } else {
  1248       return _S_rounded_up_size(__n);
  1249     }
  1250   }
  1251                 
  1252   // Allocate and construct a RopeLeaf using the supplied allocator
  1253   // Takes ownership of s instead of copying.
  1254   static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
  1255                                     size_t _p_size, allocator_type __a)
  1256   {
  1257    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _RopeLeaf).allocate(1,(const void*)0);
  1258     _STLP_TRY {
  1259       _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
  1260     }
  1261    _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a, 
  1262                                    _RopeLeaf).deallocate(__space, 1))
  1263 	  return __space;
  1264   }
  1265 
  1266   static _RopeConcatenation* _S_new_RopeConcatenation(
  1267                                                       _RopeRep* __left, _RopeRep* __right,
  1268                                                       allocator_type __a)
  1269   {
  1270    _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
  1271                                                     _RopeConcatenation).allocate(1,(const void*)0);
  1272     return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
  1273   }
  1274 
  1275   static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
  1276                                             size_t _p_size, bool __d, allocator_type __a)
  1277   {
  1278    _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, 
  1279                                                _RopeFunction).allocate(1,(const void*)0);
  1280     return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
  1281   }
  1282 
  1283   static _RopeSubstring* _S_new_RopeSubstring(
  1284                                               _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
  1285                                               size_t __l, allocator_type __a)
  1286   {
  1287    _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, 
  1288                                                 _RopeSubstring).allocate(1,(const void*)0);
  1289     return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
  1290   }
  1291 
  1292 #         define _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _p_size, __a) \
  1293                 _S_RopeLeaf_from_unowned_char_ptr(__s, _p_size, __a)     
  1294 
  1295   static
  1296   _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
  1297                                                size_t _p_size, allocator_type __a)
  1298   {
  1299     if (0 == _p_size) return 0;
  1300 
  1301    _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
  1302 
  1303     uninitialized_copy_n(__s, _p_size, __buf);
  1304     _S_cond_store_eos(__buf[_p_size]);
  1305 
  1306     _STLP_TRY {
  1307       return _S_new_RopeLeaf(__buf, _p_size, __a);
  1308     }
  1309     _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
  1310             
  1311 # if defined (_STLP_THROW_RETURN_BUG)
  1312       return 0;
  1313 # endif
  1314   }
  1315             
  1316 
  1317   // Concatenation of nonempty strings.
  1318   // Always builds a concatenation node.
  1319   // Rebalances if the result is too deep.
  1320   // Result has refcount 1.
  1321   // Does not increment left and right ref counts even though
  1322   // they are referenced.
  1323   static _RopeRep*
  1324   _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
  1325 
  1326   // Concatenation helper functions
  1327   static _RopeLeaf*
  1328   _S_leaf_concat_char_iter(_RopeLeaf* __r,
  1329                            const _CharT* __iter, size_t __slen);
  1330   // Concatenate by copying leaf.
  1331   // should take an arbitrary iterator
  1332   // result has refcount 1.
  1333 #       ifndef __GC
  1334   static _RopeLeaf* _S_destr_leaf_concat_char_iter
  1335   (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  1336   // A version that potentially clobbers __r if __r->_M_ref_count == 1.
  1337 #       endif
  1338 
  1339 
  1340   // A helper function for exponentiating strings.
  1341   // This uses a nonstandard refcount convention.
  1342   // The result has refcount 0.
  1343   friend struct _Rope_Concat_fn<_CharT,_Alloc>;
  1344   typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
  1345 
  1346 public:
  1347   static size_t _S_char_ptr_len(const _CharT* __s) {
  1348     const _CharT* __p = __s;
  1349 	  
  1350     while (!_S_is0(*__p)) { ++__p; }
  1351     return (__p - __s);
  1352   }
  1353 
  1354 public: /* for operators */
  1355   rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
  1356     : _M_tree_ptr(__a, __t) { }
  1357 private:
  1358   // Copy __r to the _CharT buffer.
  1359   // Returns __buffer + __r->_M_size._M_data.
  1360   // Assumes that buffer is uninitialized.
  1361   static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
  1362 
  1363   // Again, with explicit starting position and length.
  1364   // Assumes that buffer is uninitialized.
  1365   static _CharT* _S_flatten(_RopeRep* __r,
  1366                             size_t __start, size_t __len,
  1367                             _CharT* __buffer);
  1368 
  1369   // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
  1370 public:
  1371   static const unsigned long _S_min_len[46];
  1372 protected:
  1373   static bool _S_is_balanced(_RopeRep* __r)
  1374   { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
  1375 
  1376   static bool _S_is_almost_balanced(_RopeRep* __r)
  1377   { return (__r->_M_depth == 0 ||
  1378             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]); }
  1379 
  1380   static bool _S_is_roughly_balanced(_RopeRep* __r)
  1381   { return (__r->_M_depth <= 1 ||
  1382             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]); }
  1383 
  1384   // Assumes the result is not empty.
  1385   static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
  1386                                               _RopeRep* __right)
  1387   {
  1388     _RopeRep* __result = _S_concat_rep(__left, __right);
  1389     if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
  1390     return __result;
  1391   }
  1392 
  1393   // The basic rebalancing operation.  Logically copies the
  1394   // rope.  The result has refcount of 1.  The client will
  1395   // usually decrement the reference count of __r.
  1396   // The result is within height 2 of balanced by the above
  1397   // definition.
  1398   static _RopeRep* _S_balance(_RopeRep* __r);
  1399 
  1400   // Add all unbalanced subtrees to the forest of balanced trees.
  1401   // Used only by balance.
  1402   static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
  1403         
  1404   // Add __r to forest, assuming __r is already balanced.
  1405   static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
  1406 
  1407   // Print to stdout, exposing structure
  1408   static void _S_dump(_RopeRep* __r, int __indent = 0);
  1409 
  1410   // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  1411   static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
  1412 
  1413 public:
  1414   bool empty() const { return 0 == _M_tree_ptr._M_data; }
  1415 
  1416   // Comparison member function.  This is public only for those
  1417   // clients that need a ternary comparison.  Others
  1418   // should use the comparison operators below.
  1419   int compare(const _Self& __y) const {
  1420     return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  1421   }
  1422 
  1423   rope(const _CharT* __s, const allocator_type& __a = allocator_type())
  1424     : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),__a))
  1425   { }
  1426 
  1427   rope(const _CharT* __s, size_t __len,
  1428        const allocator_type& __a = allocator_type())
  1429     : _M_tree_ptr(__a, (_STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a)))
  1430   { }
  1431 
  1432   // Should perhaps be templatized with respect to the iterator type
  1433   // and use sequence_buffer.  (It should perhaps use sequence_buffer
  1434   // even now.)
  1435   rope(const _CharT *__s, const _CharT *__e,
  1436        const allocator_type& __a = allocator_type())
  1437     : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a))
  1438   { }
  1439 
  1440   rope(const const_iterator& __s, const const_iterator& __e,
  1441        const allocator_type& __a = allocator_type())
  1442     : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
  1443                                     __e._M_current_pos))
  1444   { }
  1445 
  1446   rope(const iterator& __s, const iterator& __e,
  1447        const allocator_type& __a = allocator_type())
  1448     : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
  1449                                     __e._M_current_pos))
  1450   { }
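
         // Illustrative usage of the constructors above (a sketch, not part of
         // this header; it assumes the <rope> wrapper header and the crope
         // typedef defined near the end of this file):
         //
         //   #include <rope>
         //   crope r1("hello");               // from a 0-terminated string
         //   crope r2("hello world", 5);      // first 5 characters of a buffer
         //   const char* p = "abcdef";
         //   crope r3(p + 1, p + 4);          // from the range [p+1, p+4): "bcd"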
  1451 
  1452   rope(_CharT __c, const allocator_type& __a = allocator_type())
  1453     : _M_tree_ptr(__a, (_RopeRep*)0)
  1454   {
  1455     _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
  1456 
  1457     _Construct(__buf, __c);
  1458     _STLP_TRY {
  1459       _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
  1460     }
  1461     _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  1462   }
  1463 
  1464   rope(size_t __n, _CharT __c,     
  1465        const allocator_type& __a = allocator_type()):
  1466     _M_tree_ptr(__a, (_RopeRep*)0) {
  1467     rope<_CharT,_Alloc> __result;
  1468 # define  __exponentiate_threshold size_t(32)
  1469     _RopeRep* __remainder;
  1470     rope<_CharT,_Alloc> __remainder_rope;
  1471 	    
  1472     // gcc-2.7.2 bugs
  1473     typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
  1474 	    
  1475     if (0 == __n)
  1476       return;
  1477 	    
  1478     size_t __exponent = __n / __exponentiate_threshold;
  1479     size_t __rest = __n % __exponentiate_threshold;
  1480     if (0 == __rest) {
  1481       __remainder = 0;
  1482     } else {
  1483       _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
  1484       uninitialized_fill_n(__rest_buffer, __rest, __c);
  1485       _S_cond_store_eos(__rest_buffer[__rest]);
  1486       _STLP_TRY {
  1487 		__remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
  1488       }
  1489       _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
  1490     }
  1491     __remainder_rope._M_tree_ptr._M_data = __remainder;
  1492     if (__exponent != 0) {
  1493       _CharT* __base_buffer =
  1494 		_M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
  1495       _RopeLeaf* __base_leaf;
  1496       rope<_CharT,_Alloc> __base_rope;
  1497       uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
  1498       _S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
  1499       _STLP_TRY {
  1500 		__base_leaf = _S_new_RopeLeaf(__base_buffer,
  1501                                       __exponentiate_threshold, __a);
  1502       }
  1503       _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer, 
  1504                                             __exponentiate_threshold, __a))
  1505 		__base_rope._M_tree_ptr._M_data = __base_leaf;
  1506       if (1 == __exponent) {
  1507 		__result = __base_rope;
  1508 #         ifndef __GC
  1509 		_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
  1510 		// One each for base_rope and __result
  1511 #         endif
  1512       } else {
  1513 		__result = power(__base_rope, __exponent, _Concat_fn());
  1514       }
  1515       if (0 != __remainder) {
  1516 		__result += __remainder_rope;
  1517       }
  1518     } else {
  1519       __result = __remainder_rope;
  1520     }
  1521     _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
  1522     _M_tree_ptr._M_data->_M_ref_nonnil();
  1523 # undef __exponentiate_threshold
  1524   }
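
         // The constructor above fills the rope with __n copies of __c.  Rather
         // than allocating __n characters, it builds one leaf of at most 32
         // characters and concatenates it via power(), so the resulting rope
         // shares that single leaf.  A hedged usage sketch (crope is the
         // typedef defined near the end of this header):
         //
         //   crope stars(size_t(1000000), '*');   // a million '*'s, but only a
         //                                        // handful of shared nodes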
  1525 
  1526   rope(const allocator_type& __a = allocator_type())
  1527     : _M_tree_ptr(__a, (_RopeRep*)0) {}
  1528 
  1529   // Construct a rope from a function that can compute its members
  1530   rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
  1531        const allocator_type& __a = allocator_type())
  1532     : _M_tree_ptr(__a, (_RopeRep*)0)
  1533   {
  1534     _M_tree_ptr._M_data = (0 == __len) ?
  1535       0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  1536   }
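
         // A hedged sketch of the char_producer constructor above: characters
         // are computed on demand through the char_producer interface declared
         // earlier in this header (operator()(start_pos, len, buffer)).  The
         // producer class below is purely illustrative:
         //
         //   struct fill_producer : public char_producer<char> {
         //     virtual void operator()(size_t /*pos*/, size_t len, char* buf)
         //     { for (size_t i = 0; i != len; ++i) buf[i] = 'x'; }
         //   };
         //   crope lazy(new fill_producer, 1000000, true);  // deleted by the rope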
  1537 
  1538   rope(const _Self& __x)
  1539     : _M_tree_ptr(__x.get_allocator(), __x._M_tree_ptr._M_data)
  1540   {
  1541     _S_ref(_M_tree_ptr._M_data);
  1542   }
  1543 
  1544   ~rope()
  1545   {
  1546     _S_unref(_M_tree_ptr._M_data);
  1547   }
  1548 
  1549   _Self& operator=(const _Self& __x)
  1550   {
  1551     _RopeRep* __old = _M_tree_ptr._M_data;
  1552     _STLP_ASSERT(get_allocator() == __x.get_allocator())
  1553     _M_tree_ptr._M_data = __x._M_tree_ptr._M_data;
  1554     _S_ref(_M_tree_ptr._M_data);
  1555     _S_unref(__old);
  1556     return(*this);
  1557   }
  1558   void clear()
  1559   {
  1560     _S_unref(_M_tree_ptr._M_data);
  1561     _M_tree_ptr._M_data = 0;
  1562   }
  1563   void push_back(_CharT __x)
  1564   {
  1565     _RopeRep* __old = _M_tree_ptr._M_data;
  1566     _M_tree_ptr._M_data = _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1);
  1567     _S_unref(__old);
  1568   }
  1569 
  1570   void pop_back()
  1571   {
  1572     _RopeRep* __old = _M_tree_ptr._M_data;
  1573     _M_tree_ptr._M_data = 
  1574       _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
  1575     _S_unref(__old);
  1576   }
  1577 
  1578   _CharT back() const
  1579   {
  1580     return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  1581   }
  1582 
  1583   void push_front(_CharT __x)
  1584   {
  1585     _RopeRep* __old = _M_tree_ptr._M_data;
  1586     _RopeRep* __left =
  1587       _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
  1588     _STLP_TRY {
  1589       _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
  1590       _S_unref(__old);
  1591       _S_unref(__left);
  1592     }
  1593     _STLP_UNWIND(_S_unref(__left))
  1594   }
  1595 
  1596   void pop_front()
  1597   {
  1598     _RopeRep* __old = _M_tree_ptr._M_data;
  1599     _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
  1600     _S_unref(__old);
  1601   }
  1602 
  1603   _CharT front() const
  1604   {
  1605     return _S_fetch(_M_tree_ptr._M_data, 0);
  1606   }
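
         // Illustrative use of the deque-like operations above (a sketch):
         //
         //   crope r("bc");
         //   r.push_back('d');      // "bcd"
         //   r.push_front('a');     // "abcd"
         //   char f = r.front();    // 'a'
         //   char b = r.back();     // 'd'
         //   r.pop_front();         // "bcd"
         //   r.pop_back();          // "bc"
         //
         // Each call builds a new tree sharing most of the old one; the old
         // root is unreferenced afterwards.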
  1607 
  1608   void balance()
  1609   {
  1610     _RopeRep* __old = _M_tree_ptr._M_data;
  1611     _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
  1612     _S_unref(__old);
  1613   }
  1614 
  1615   void copy(_CharT* __buffer) const {
  1616     _STLP_STD::_Destroy(__buffer, __buffer + size());
  1617     _S_flatten(_M_tree_ptr._M_data, __buffer);
  1618   }
  1619 
  1620   // This is the copy function from the standard, but
  1621   // with the arguments reordered to make it consistent with the
  1622   // rest of the interface.
  1623   // Note that this is guaranteed not to compile if the draft standard
  1624   // argument order is assumed.
  1625   size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const 
  1626   {
  1627     size_t _p_size = size();
  1628     size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
  1629 
  1630     _STLP_STD::_Destroy(__buffer, __buffer + __len);
  1631     _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
  1632     return __len;
  1633   }
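
         // A sketch of the two copy overloads above; note the (position,
         // length, buffer) argument order used here:
         //
         //   crope r("abcdef");
         //   char buf[6];
         //   r.copy(buf);                    // flattens all of "abcdef" into buf
         //   size_t n = r.copy(1, 3, buf);   // copies "bcd"; n == 3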
  1634 
  1635   // Print to stdout, exposing structure.  May be useful for
  1636   // performance debugging.
  1637   void dump() {
  1638     _S_dump(_M_tree_ptr._M_data);
  1639   }
  1640 
  1641   // Convert to a 0-terminated string in newly allocated memory.
  1642   // Embedded 0s in the input do not terminate the copy.
  1643   const _CharT* c_str() const;
  1644 
  1645   // As above, but also use the flattened representation as
  1646   // the new rope representation.
  1647   const _CharT* replace_with_c_str();
  1648 
  1649   // Reclaim memory for the flattened string generated by c_str.
  1650   // Intentionally undocumented, since it's hard to say when this
  1651   // is safe for multiple threads.
  1652   void delete_c_str () {
  1653     if (0 == _M_tree_ptr._M_data) return;
  1654     if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag && 
  1655         ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data == 
  1656         _M_tree_ptr._M_data->_M_c_string) {
  1657       // Representation shared
  1658       return;
  1659     }
  1660 #           ifndef __GC
  1661     _M_tree_ptr._M_data->_M_free_c_string();
  1662 #           endif
  1663     _M_tree_ptr._M_data->_M_c_string = 0;
  1664   }
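
         // Illustrative c_str() usage (a sketch): the flattened copy is cached
         // in the tree, so it persists until delete_c_str() or destruction.
         //
         //   crope r("abc");
         //   const char* s = r.c_str();   // 0-terminated, heap-allocated copy
         //   // ... pass s to a C API ...
         //   r.delete_c_str();            // reclaim it once no one else reads it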
  1665 
  1666   _CharT operator[] (size_type __pos) const {
  1667     return _S_fetch(_M_tree_ptr._M_data, __pos);
  1668   }
  1669 
  1670   _CharT at(size_type __pos) const {
  1671     // if (__pos >= size()) throw out_of_range;  // XXX
  1672     return (*this)[__pos];
  1673   }
  1674 
  1675   const_iterator begin() const {
  1676     return(const_iterator(_M_tree_ptr._M_data, 0));
  1677   }
  1678 
  1679   // An easy way to get a const iterator from a non-const container.
  1680   const_iterator const_begin() const {
  1681     return(const_iterator(_M_tree_ptr._M_data, 0));
  1682   }
  1683 
  1684   const_iterator end() const {
  1685     return(const_iterator(_M_tree_ptr._M_data, size()));
  1686   }
  1687 
  1688   const_iterator const_end() const {
  1689     return(const_iterator(_M_tree_ptr._M_data, size()));
  1690   }
  1691 
  1692   size_type size() const { 
  1693     return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
  1694   }
  1695 
  1696   size_type length() const {
  1697     return size();
  1698   }
  1699 
  1700   size_type max_size() const {
  1701     return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
  1702     //  Guarantees that the result can be sufficiently
  1703     //  balanced.  Longer ropes will probably still work,
  1704     //  but it's harder to make guarantees.
  1705   }
  1706 
  1707   const_reverse_iterator rbegin() const {
  1708     return const_reverse_iterator(end());
  1709   }
  1710 
  1711   const_reverse_iterator const_rbegin() const {
  1712     return const_reverse_iterator(end());
  1713   }
  1714 
  1715   const_reverse_iterator rend() const {
  1716     return const_reverse_iterator(begin());
  1717   }
  1718 
  1719   const_reverse_iterator const_rend() const {
  1720     return const_reverse_iterator(begin());
  1721   }
  1722   // The symmetric cases are intentionally omitted, since they're presumed
  1723   // to be less common, and we don't handle them as well.
  1724 
  1725   // The following should really be templatized.
  1726   // The first argument should be an input iterator or
  1727   // forward iterator with value_type _CharT.
  1728   _Self& append(const _CharT* __iter, size_t __n) {
  1729     _RopeRep* __result = 
  1730       _S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n);
  1731     _S_unref(_M_tree_ptr._M_data);
  1732     _M_tree_ptr._M_data = __result;
  1733     return *this;
  1734   }
  1735 
  1736   _Self& append(const _CharT* __c_string) {
  1737     size_t __len = _S_char_ptr_len(__c_string);
  1738     append(__c_string, __len);
  1739     return(*this);
  1740   }
  1741 
  1742   _Self& append(const _CharT* __s, const _CharT* __e) {
  1743     _RopeRep* __result =
  1744       _S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s);
  1745     _S_unref(_M_tree_ptr._M_data);
  1746     _M_tree_ptr._M_data = __result;
  1747     return *this;
  1748   }
  1749 
  1750   _Self& append(const_iterator __s, const_iterator __e) {
  1751     _STLP_ASSERT(__s._M_root == __e._M_root)
  1752     _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
  1753     _Self_destruct_ptr __appendee(_S_substring(
  1754                                                __s._M_root, __s._M_current_pos, __e._M_current_pos));
  1755     _RopeRep* __result = 
  1756       _S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee);
  1757     _S_unref(_M_tree_ptr._M_data);
  1758     _M_tree_ptr._M_data = __result;
  1759     return *this;
  1760   }
  1761 
  1762   _Self& append(_CharT __c) {
  1763     _RopeRep* __result = 
  1764       _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1);
  1765     _S_unref(_M_tree_ptr._M_data);
  1766     _M_tree_ptr._M_data = __result;
  1767     return *this;
  1768   }
  1769 
  1770   _Self& append() { return append(_CharT()); }  // XXX why?
  1771 
  1772   _Self& append(const _Self& __y) {
  1773     _STLP_ASSERT(__y.get_allocator() == get_allocator())
  1774     _RopeRep* __result = _S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  1775     _S_unref(_M_tree_ptr._M_data);
  1776     _M_tree_ptr._M_data = __result;
  1777     return *this;
  1778   }
  1779 
  1780   _Self& append(size_t __n, _CharT __c) {
  1781     rope<_CharT,_Alloc> __last(__n, __c);
  1782     return append(__last);
  1783   }
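
         // Illustrative use of the append family above (a sketch):
         //
         //   crope r("abc");
         //   r.append("def");              // "abcdef"
         //   r.append(size_t(3), '!');     // "abcdef!!!"
         //   r.append('?');                // "abcdef!!!?"
         //   r += crope("ghi");            // operator+= is defined later on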
  1784 
  1785   void swap(_Self& __b) {
  1786     _STLP_ASSERT(get_allocator() == __b.get_allocator())
  1787     _RopeRep* __tmp = _M_tree_ptr._M_data;
  1788     _M_tree_ptr._M_data = __b._M_tree_ptr._M_data;
  1789     __b._M_tree_ptr._M_data = __tmp;
  1790   }
  1791 
  1792 
  1793 protected:
  1794   // Result is included in refcount.
  1795   static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
  1796                            size_t __pos2, _RopeRep* __r) {
  1797     if (0 == __old) { _S_ref(__r); return __r; }
  1798     _Self_destruct_ptr __left(
  1799                               _S_substring(__old, 0, __pos1));
  1800     _Self_destruct_ptr __right(
  1801                                _S_substring(__old, __pos2, __old->_M_size._M_data));
  1802 	_STLP_MPWFIX_TRY	//*TY 06/01/2000 - 
  1803     _RopeRep* __result;
  1804 
  1805     if (0 == __r) {
  1806       __result = _S_concat_rep(__left, __right);
  1807     } else {
  1808       _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
  1809       _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
  1810       __result = _S_concat_rep(__left_result, __right);
  1811     }
  1812     return __result;
  1813 	_STLP_MPWFIX_CATCH	//*TY 06/01/2000 - 
  1814   }
  1815 
  1816 public:
  1817   void insert(size_t __p, const _Self& __r) {
  1818     _RopeRep* __result = 
  1819       replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data);
  1820     _STLP_ASSERT(get_allocator() == __r.get_allocator())
  1821     _S_unref(_M_tree_ptr._M_data);
  1822     _M_tree_ptr._M_data = __result;
  1823   }
  1824 
  1825   void insert(size_t __p, size_t __n, _CharT __c) {
  1826     rope<_CharT,_Alloc> __r(__n,__c);
  1827     insert(__p, __r);
  1828   }
  1829 
  1830   void insert(size_t __p, const _CharT* __i, size_t __n) {
  1831     _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
  1832     _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
  1833     _Self_destruct_ptr __left_result(
  1834                                      _S_concat_char_iter(__left, __i, __n));
  1835     // _S_destr_concat_char_iter should be safe here.
  1836     // But as it stands it's probably not a win, since __left
  1837     // is likely to have additional references.
  1838     _RopeRep* __result = _S_concat_rep(__left_result, __right);
  1839     _S_unref(_M_tree_ptr._M_data);
  1840     _M_tree_ptr._M_data = __result;
  1841   }
  1842 
  1843   void insert(size_t __p, const _CharT* __c_string) {
  1844     insert(__p, __c_string, _S_char_ptr_len(__c_string));
  1845   }
  1846 
  1847   void insert(size_t __p, _CharT __c) {
  1848     insert(__p, &__c, 1);
  1849   }
  1850 
  1851   void insert(size_t __p) {
  1852     _CharT __c = _CharT();
  1853     insert(__p, &__c, 1);
  1854   }
  1855 
  1856   void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
  1857     _Self __r(__i, __j);
  1858     insert(__p, __r);
  1859   }
  1860 
  1861   void insert(size_t __p, const const_iterator& __i,
  1862               const const_iterator& __j) {
  1863     _Self __r(__i, __j);
  1864     insert(__p, __r);
  1865   }
  1866 
  1867   void insert(size_t __p, const iterator& __i,
  1868               const iterator& __j) {
  1869     _Self __r(__i, __j);
  1870     insert(__p, __r);
  1871   }
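
         // Illustrative use of the position-based insert overloads above
         // (a sketch):
         //
         //   crope r("ad");
         //   r.insert(1, "bc");                    // "abcd"
         //   r.insert(4, 'e');                     // "abcde"
         //   r.insert(5, crope(size_t(3), '.'));   // "abcde..."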
  1872 
  1873   // (position, length) versions of replace operations:
  1874 
  1875   void replace(size_t __p, size_t __n, const _Self& __r) {
  1876     _RopeRep* __result = 
  1877       replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data);
  1878     _S_unref(_M_tree_ptr._M_data);
  1879     _M_tree_ptr._M_data = __result;
  1880   }
  1881 
  1882   void replace(size_t __p, size_t __n, 
  1883                const _CharT* __i, size_t __i_len) {
  1884     _Self __r(__i, __i_len);
  1885     replace(__p, __n, __r);
  1886   }
  1887 
  1888   void replace(size_t __p, size_t __n, _CharT __c) {
  1889     _Self __r(__c);
  1890     replace(__p, __n, __r);
  1891   }
  1892 
  1893   void replace(size_t __p, size_t __n, const _CharT* __c_string) {
  1894     _Self __r(__c_string);
  1895     replace(__p, __n, __r);
  1896   }
  1897 
  1898   void replace(size_t __p, size_t __n, 
  1899                const _CharT* __i, const _CharT* __j) {
  1900     _Self __r(__i, __j);
  1901     replace(__p, __n, __r);
  1902   }
  1903 
  1904   void replace(size_t __p, size_t __n,
  1905                const const_iterator& __i, const const_iterator& __j) {
  1906     _Self __r(__i, __j);
  1907     replace(__p, __n, __r);
  1908   }
  1909 
  1910   void replace(size_t __p, size_t __n,
  1911                const iterator& __i, const iterator& __j) {
  1912     _Self __r(__i, __j);
  1913     replace(__p, __n, __r);
  1914   }
  1915 
  1916   // Single character variants:
  1917   void replace(size_t __p, _CharT __c) {
  1918     iterator __i(this, __p);
  1919     *__i = __c;
  1920   }
  1921 
  1922   void replace(size_t __p, const _Self& __r) {
  1923     replace(__p, 1, __r);
  1924   }
  1925 
  1926   void replace(size_t __p, const _CharT* __i, size_t __i_len) {
  1927     replace(__p, 1, __i, __i_len);
  1928   }
  1929 
  1930   void replace(size_t __p, const _CharT* __c_string) {
  1931     replace(__p, 1, __c_string);
  1932   }
  1933 
  1934   void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
  1935     replace(__p, 1, __i, __j);
  1936   }
  1937 
  1938   void replace(size_t __p, const const_iterator& __i,
  1939                const const_iterator& __j) {
  1940     replace(__p, 1, __i, __j);
  1941   }
  1942 
  1943   void replace(size_t __p, const iterator& __i,
  1944                const iterator& __j) {
  1945     replace(__p, 1, __i, __j);
  1946   }
  1947 
  1948   // Erase, (position, size) variant.
  1949   void erase(size_t __p, size_t __n) {
  1950     _RopeRep* __result = replace(_M_tree_ptr._M_data, __p, __p + __n, 0);
  1951     _S_unref(_M_tree_ptr._M_data);
  1952     _M_tree_ptr._M_data = __result;
  1953   }
  1954 
  1955   // Erase, single character
  1956   void erase(size_t __p) {
  1957     erase(__p, 1);
  1958   }
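
         // Illustrative use of the (position, length) replace and erase
         // overloads above (a sketch):
         //
         //   crope r("abcdef");
         //   r.replace(1, 2, "BC");   // "aBCdef": replace 2 chars at position 1
         //   r.erase(1, 2);           // "adef":   erase 2 chars at position 1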
  1959 
  1960   // Insert, iterator variants.  
  1961   iterator insert(const iterator& __p, const _Self& __r)
  1962   { insert(__p.index(), __r); return __p; }
  1963   iterator insert(const iterator& __p, size_t __n, _CharT __c)
  1964   { insert(__p.index(), __n, __c); return __p; }
  1965   iterator insert(const iterator& __p, _CharT __c) 
  1966   { insert(__p.index(), __c); return __p; }
  1967   iterator insert(const iterator& __p ) 
  1968   { insert(__p.index()); return __p; }
  1969   iterator insert(const iterator& __p, const _CharT* c_string) 
  1970   { insert(__p.index(), c_string); return __p; }
  1971   iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  1972   { insert(__p.index(), __i, __n); return __p; }
  1973   iterator insert(const iterator& __p, const _CharT* __i, 
  1974                   const _CharT* __j)
  1975   { insert(__p.index(), __i, __j);  return __p; }
  1976   iterator insert(const iterator& __p,
  1977                   const const_iterator& __i, const const_iterator& __j)
  1978   { insert(__p.index(), __i, __j); return __p; }
  1979   iterator insert(const iterator& __p,
  1980                   const iterator& __i, const iterator& __j)
  1981   { insert(__p.index(), __i, __j); return __p; }
  1982 
  1983   // Replace, range variants.
  1984   void replace(const iterator& __p, const iterator& __q,
  1985                const _Self& __r)
  1986   { replace(__p.index(), __q.index() - __p.index(), __r); }
  1987   void replace(const iterator& __p, const iterator& __q, _CharT __c)
  1988   { replace(__p.index(), __q.index() - __p.index(), __c); }
  1989   void replace(const iterator& __p, const iterator& __q,
  1990                const _CharT* __c_string)
  1991   { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  1992   void replace(const iterator& __p, const iterator& __q,
  1993                const _CharT* __i, size_t __n)
  1994   { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  1995   void replace(const iterator& __p, const iterator& __q,
  1996                const _CharT* __i, const _CharT* __j)
  1997   { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  1998   void replace(const iterator& __p, const iterator& __q,
  1999                const const_iterator& __i, const const_iterator& __j)
  2000   { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  2001   void replace(const iterator& __p, const iterator& __q,
  2002                const iterator& __i, const iterator& __j)
  2003   { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  2004 
  2005   // Replace, iterator variants.
  2006   void replace(const iterator& __p, const _Self& __r)
  2007   { replace(__p.index(), __r); }
  2008   void replace(const iterator& __p, _CharT __c)
  2009   { replace(__p.index(), __c); }
  2010   void replace(const iterator& __p, const _CharT* __c_string)
  2011   { replace(__p.index(), __c_string); }
  2012   void replace(const iterator& __p, const _CharT* __i, size_t __n)
  2013   { replace(__p.index(), __i, __n); }
  2014   void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  2015   { replace(__p.index(), __i, __j); }
  2016   void replace(const iterator& __p, const_iterator __i, 
  2017                const_iterator __j)
  2018   { replace(__p.index(), __i, __j); }
  2019   void replace(const iterator& __p, iterator __i, iterator __j)
  2020   { replace(__p.index(), __i, __j); }
  2021 
  2022   // Iterator and range variants of erase
  2023   iterator erase(const iterator& __p, const iterator& __q) {
  2024     size_t __p_index = __p.index();
  2025     erase(__p_index, __q.index() - __p_index);
  2026     return iterator(this, __p_index);
  2027   }
  2028   iterator erase(const iterator& __p) {
  2029     size_t __p_index = __p.index();
  2030     erase(__p_index, 1);
  2031     return iterator(this, __p_index);
  2032   }
  2033 
  2034   _Self substr(size_t __start, size_t __len = 1) const {
  2035     return rope<_CharT,_Alloc>(
  2036                                _S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  2037   }
  2038 
  2039   _Self substr(iterator __start, iterator __end) const {
  2040     return rope<_CharT,_Alloc>(
  2041                                _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  2042   }
  2043         
  2044   _Self substr(iterator __start) const {
  2045     size_t __pos = __start.index();
  2046     return rope<_CharT,_Alloc>(
  2047                                _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  2048   }
  2049         
  2050   _Self substr(const_iterator __start, const_iterator __end) const {
  2051     // This might eventually take advantage of the cache in the
  2052     // iterator.
  2053     return rope<_CharT,_Alloc>(
  2054                                _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  2055   }
  2056 
  2057   rope<_CharT,_Alloc> substr(const_iterator __start) {
  2058     size_t __pos = __start.index();
  2059     return rope<_CharT,_Alloc>(
  2060                                _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  2061   }
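
         // Illustrative use of substr above (a sketch); the result typically
         // shares nodes of the original tree rather than copying characters:
         //
         //   crope r("abcdef");
         //   crope s = r.substr(1, 3);                            // "bcd"
         //   crope t = r.substr(r.begin() + 2, r.begin() + 4);    // "cd"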
  2062 
  2063   enum { npos = -1 };
  2064 
  2065   //         static const size_type npos;
  2066 
  2067   size_type find(_CharT __c, size_type __pos = 0) const;
  2068   size_type find(const _CharT* __s, size_type __pos = 0) const {
  2069     size_type __result_pos;
  2070     const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
  2071                                      __s, __s + _S_char_ptr_len(__s));
  2072     __result_pos = __result.index();
  2073 #           ifndef _STLP_OLD_ROPE_SEMANTICS
  2074     if (__result_pos == size()) __result_pos = npos;
  2075 #           endif
  2076     return __result_pos;
  2077   }
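
         // Illustrative use of find above (a sketch):
         //
         //   crope r("abcabc");
         //   crope::size_type i = r.find('b');    // 1
         //   crope::size_type j = r.find("ca");   // 2
         //   if (r.find("xyz") == (crope::size_type)crope::npos) { /* absent */ }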
  2078 
  2079   iterator mutable_begin() {
  2080     return(iterator(this, 0));
  2081   }
  2082 
  2083   iterator mutable_end() {
  2084     return(iterator(this, size()));
  2085   }
  2086 
  2087   reverse_iterator mutable_rbegin() {
  2088     return reverse_iterator(mutable_end());
  2089   }
  2090 
  2091   reverse_iterator mutable_rend() {
  2092     return reverse_iterator(mutable_begin());
  2093   }
  2094 
  2095   reference mutable_reference_at(size_type __pos) {
  2096     return reference(this, __pos);
  2097   }
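
         // A sketch of the mutable access paths above: writes go through a
         // character-reference proxy, so only the affected leaf is copied and
         // other ropes sharing the tree are unaffected.
         //
         //   crope r("abc");
         //   crope::iterator it = r.mutable_begin();
         //   *it = 'A';                        // "Abc"
         //   r.mutable_reference_at(2) = 'C';  // "AbC"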
  2098 
  2099 #       ifdef __STD_STUFF
  2100   reference operator[] (size_type __pos) {
  2101     return reference(this, __pos);
  2102   }
  2103 
  2104   reference at(size_type __pos) {
  2105     // if (__pos >= size()) throw out_of_range;  // XXX
  2106     return (*this)[__pos];
  2107   }
  2108 
  2109   void resize(size_type, _CharT) {}
  2110   void resize(size_type) {}
  2111   void reserve(size_type = 0) {}
  2112   size_type capacity() const {
  2113     return max_size();
  2114   }
  2115 
  2116   // Stuff below this line is dangerous because it's error prone.
  2117   // I would really like to get rid of it.
  2118   // copy function with funny arg ordering.
  2119   size_type copy(_CharT* __buffer, size_type __n, 
  2120                  size_type __pos = 0) const {
  2121     return copy(__pos, __n, __buffer);
  2122   }
  2123 
  2124   iterator end() { return mutable_end(); }
  2125 
  2126   iterator begin() { return mutable_begin(); }
  2127 
  2128   reverse_iterator rend() { return mutable_rend(); }
  2129 
  2130   reverse_iterator rbegin() { return mutable_rbegin(); }
  2131 
  2132 #       else
  2133 
  2134   const_iterator end() { return const_end(); }
  2135 
  2136   const_iterator begin() { return const_begin(); }
  2137 
  2138   const_reverse_iterator rend() { return const_rend(); }
  2139   
  2140   const_reverse_iterator rbegin() { return const_rbegin(); }
  2141 
  2142 #	endif
  2143 
  2144   __ROPE_DEFINE_ALLOCS(_Alloc, _M_tree_ptr)
  2145     };
  2146 
  2147 # undef __ROPE_DEFINE_ALLOC
  2148 # undef __ROPE_DEFINE_ALLOCS
  2149 
  2150 template <class _CharT, class _Alloc>
  2151 inline _CharT 
  2152 _Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
  2153 {
  2154   return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n);
  2155 }
  2156 
  2157 template <class _CharT, class _Alloc>
  2158 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2159                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2160   return (__x._M_current_pos == __y._M_current_pos && 
  2161           __x._M_root == __y._M_root);
  2162 }
  2163 
  2164 template <class _CharT, class _Alloc>
  2165 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2166                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2167   return (__x._M_current_pos < __y._M_current_pos);
  2168 }
  2169 
  2170 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
  2171 
  2172 template <class _CharT, class _Alloc>
  2173 inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2174                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2175   return !(__x == __y);
  2176 }
  2177 
  2178 template <class _CharT, class _Alloc>
  2179 inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2180                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2181   return __y < __x;
  2182 }
  2183 
  2184 template <class _CharT, class _Alloc>
  2185 inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2186                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2187   return !(__y < __x);
  2188 }
  2189 
  2190 template <class _CharT, class _Alloc>
  2191 inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2192                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2193   return !(__x < __y);
  2194 }
  2195 
  2196 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2197 
  2198 template <class _CharT, class _Alloc>
  2199 inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
  2200                            const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  2201   return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
  2202 }
  2203 
  2204 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000		// dwa 8/21/97  - "ambiguous access to overloaded function" bug.
  2205 template <class _CharT, class _Alloc>
  2206 inline _Rope_const_iterator<_CharT,_Alloc>
  2207 operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
  2208   return _Rope_const_iterator<_CharT,_Alloc>(
  2209                                              __x._M_root, __x._M_current_pos - __n);
  2210 }
  2211 # endif
  2212 
  2213 template <class _CharT, class _Alloc>
  2214 inline _Rope_const_iterator<_CharT,_Alloc>
  2215 operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
  2216   return _Rope_const_iterator<_CharT,_Alloc>(
  2217                                              __x._M_root, __x._M_current_pos + __n);
  2218 }
  2219 
  2220 template <class _CharT, class _Alloc>
  2221 inline _Rope_const_iterator<_CharT,_Alloc>
  2222 operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
  2223   return _Rope_const_iterator<_CharT,_Alloc>(
  2224                                              __x._M_root, __x._M_current_pos + __n);
  2225 }
  2226 
  2227 template <class _CharT, class _Alloc>
  2228 inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
  2229                         const _Rope_iterator<_CharT,_Alloc>& __y) {
  2230   return (__x._M_current_pos == __y._M_current_pos && 
  2231           __x._M_root_rope == __y._M_root_rope);
  2232 }
  2233 
  2234 template <class _CharT, class _Alloc>
  2235 inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
  2236                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  2237   return (__x._M_current_pos < __y._M_current_pos);
  2238 }
  2239 
  2240 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
  2241 
  2242 template <class _CharT, class _Alloc>
  2243 inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
  2244                         const _Rope_iterator<_CharT,_Alloc>& __y) {
  2245   return !(__x == __y);
  2246 }
  2247 
  2248 template <class _CharT, class _Alloc>
  2249 inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
  2250                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  2251   return __y < __x;
  2252 }
  2253 
  2254 template <class _CharT, class _Alloc>
  2255 inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
  2256                         const _Rope_iterator<_CharT,_Alloc>& __y) {
  2257   return !(__y < __x);
  2258 }
  2259 
  2260 template <class _CharT, class _Alloc>
  2261 inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
  2262                         const _Rope_iterator<_CharT,_Alloc>& __y) {
  2263   return !(__x < __y);
  2264 }
  2265 
  2266 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2267 
  2268 template <class _CharT, class _Alloc>
  2269 inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
  2270                            const _Rope_iterator<_CharT,_Alloc>& __y) {
  2271   return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
  2272 }
  2273 
  2274 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000		// dwa 8/21/97  - "ambiguous access to overloaded function" bug.
  2275 template <class _CharT, class _Alloc>
  2276 inline _Rope_iterator<_CharT,_Alloc>
  2277 operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
  2278           ptrdiff_t __n) {
  2279   return _Rope_iterator<_CharT,_Alloc>(
  2280                                        __x._M_root_rope, __x._M_current_pos - __n);
  2281 }
  2282 # endif
  2283 
  2284 template <class _CharT, class _Alloc>
  2285 inline _Rope_iterator<_CharT,_Alloc>
  2286 operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
  2287           ptrdiff_t __n) {
  2288   return _Rope_iterator<_CharT,_Alloc>(
  2289                                        __x._M_root_rope, __x._M_current_pos + __n);
  2290 }
  2291 
  2292 template <class _CharT, class _Alloc>
  2293 inline _Rope_iterator<_CharT,_Alloc>
  2294 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  2295   return _Rope_iterator<_CharT,_Alloc>(
  2296                                        __x._M_root_rope, __x._M_current_pos + __n);
  2297 }
  2298 
  2299 template <class _CharT, class _Alloc>
  2300 inline
  2301 rope<_CharT,_Alloc>
  2302 operator+ (const rope<_CharT,_Alloc>& __left,
  2303            const rope<_CharT,_Alloc>& __right)
  2304 {
  2305   _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  2306   return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  2307   // Inlining this should make it possible to keep __left and
  2308   // __right in registers.
  2309 }
  2310 
  2311 template <class _CharT, class _Alloc>
  2312 inline
  2313 rope<_CharT,_Alloc>&
  2314 operator+= (rope<_CharT,_Alloc>& __left, 
  2315             const rope<_CharT,_Alloc>& __right)
  2316 {
  2317   __left.append(__right);
  2318   return __left;
  2319 }
  2320 
  2321 template <class _CharT, class _Alloc>
  2322 inline
  2323 rope<_CharT,_Alloc>
  2324 operator+ (const rope<_CharT,_Alloc>& __left,
  2325            const _CharT* __right) {
  2326   size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  2327   return rope<_CharT,_Alloc>(
  2328                              rope<_CharT,_Alloc>::_S_concat_char_iter(
  2329                                                                       __left._M_tree_ptr._M_data, __right, __rlen)); 
  2330 }
  2331 
  2332 template <class _CharT, class _Alloc>
  2333 inline
  2334 rope<_CharT,_Alloc>&
  2335 operator+= (rope<_CharT,_Alloc>& __left,
  2336             const _CharT* __right) {
  2337   __left.append(__right);
  2338   return __left;
  2339 }
  2340 
  2341 template <class _CharT, class _Alloc>
  2342 inline
  2343 rope<_CharT,_Alloc>
  2344 operator+ (const rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
  2345   return rope<_CharT,_Alloc>(
  2346                              rope<_CharT,_Alloc>::_S_concat_char_iter(
  2347                                                                       __left._M_tree_ptr._M_data, &__right, 1));
  2348 }
  2349 
  2350 template <class _CharT, class _Alloc>
  2351 inline
  2352 rope<_CharT,_Alloc>&
  2353 operator+= (rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
  2354   __left.append(__right);
  2355   return __left;
  2356 }
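
       // Illustrative use of the concatenation operators above (a sketch; crope
       // is the typedef defined below):
       //
       //   crope a("hello, "), b("world");
       //   crope c = a + b;      // shares a's and b's trees; no character copy
       //   c += "!";
       //   c = c + '\n';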
  2357 
  2358 template <class _CharT, class _Alloc>
  2359 inline bool
  2360 operator< (const rope<_CharT,_Alloc>& __left, 
  2361            const rope<_CharT,_Alloc>& __right) {
  2362   return __left.compare(__right) < 0;
  2363 }
  2364         
  2365 template <class _CharT, class _Alloc>
  2366 inline bool
  2367 operator== (const rope<_CharT,_Alloc>& __left, 
  2368             const rope<_CharT,_Alloc>& __right) {
  2369   return __left.compare(__right) == 0;
  2370 }
  2371 
  2372 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
  2373 
  2374 template <class _CharT, class _Alloc>
  2375 inline bool
  2376 operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  2377   return !(__x == __y);
  2378 }
  2379 
  2380 template <class _CharT, class _Alloc>
  2381 inline bool
  2382 operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  2383   return __y < __x;
  2384 }
  2385 
  2386 template <class _CharT, class _Alloc>
  2387 inline bool
  2388 operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  2389   return !(__y < __x);
  2390 }
  2391 
  2392 template <class _CharT, class _Alloc>
  2393 inline bool
  2394 operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  2395   return !(__x < __y);
  2396 }
  2397 
  2398 template <class _CharT, class _Alloc>
  2399 inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
  2400                         const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  2401   return !(__x == __y);
  2402 }
  2403 
  2404 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
  2405 
  2406 template <class _CharT, class _Alloc>
  2407 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
  2408                         const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  2409   return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
  2410 }
  2411 
  2412 #ifdef _STLP_USE_NEW_IOSTREAMS
  2413 template<class _CharT, class _Traits, class _Alloc>
  2414 basic_ostream<_CharT, _Traits>& operator<< (
  2415                                             basic_ostream<_CharT, _Traits>& __o,
  2416                                             const rope<_CharT, _Alloc>& __r);
  2417 #elif ! defined (_STLP_USE_NO_IOSTREAMS)
  2418 template<class _CharT, class _Alloc>
  2419 ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);        
  2420 #endif
  2421         
  2422 typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
  2423 # ifdef _STLP_HAS_WCHAR_T
  2424 typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
  2425 # endif
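
       // A short usage sketch for the typedefs above (assumes the user includes
       // the <rope> wrapper header and <iostream>):
       //
       //   crope r("rope");
       //   for (crope::size_type i = 0; i != r.size(); ++i)
       //     std::cout << r[i];             // character access via operator[]
       //   std::cout << '\n' << r << '\n';  // operator<< declared above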
  2426 
  2427 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
  2428 {
  2429   return __c.mutable_reference_at(__i);
  2430 }
  2431 
  2432 # ifdef _STLP_HAS_WCHAR_T
  2433 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
  2434 {
  2435   return __c.mutable_reference_at(__i);
  2436 }
  2437 # endif
  2438 
  2439 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
  2440 
  2441 template <class _CharT, class _Alloc>
  2442 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
  2443   __x.swap(__y);
  2444 }
  2445 #else
  2446 
  2447 inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
  2448 # ifdef _STLP_HAS_WCHAR_T	// dwa 8/21/97
  2449 inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
  2450 # endif
  2451 
  2452 #endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
  2453 
  2454 
  2455 // Hash functions should probably be revisited later:
  2456 _STLP_TEMPLATE_NULL struct hash<crope>
  2457 {
  2458   size_t operator()(const crope& __str) const
  2459   {
  2460     size_t _p_size = __str.size();
  2461 
  2462     if (0 == _p_size) return 0;
  2463     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2464   }
  2465 };
  2466 
  2467 # ifdef _STLP_HAS_WCHAR_T	// dwa 8/21/97
  2468 _STLP_TEMPLATE_NULL struct hash<wrope>
  2469 {
  2470   size_t operator()(const wrope& __str) const
  2471   {
  2472     size_t _p_size = __str.size();
  2473 
  2474     if (0 == _p_size) return 0;
  2475     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2476   }
  2477 };
  2478 #endif
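
       // The hash specializations above let ropes be used as keys in STLport's
       // hash-based containers.  A hedged sketch:
       //
       //   hash<crope> h;
       //   size_t code = h(crope("abc"));   // 13*'a' + 5*'c' + 3
       //   // e.g. hash_map<crope, int> counts;  counts[crope("abc")]++;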
  2479 
  2480 #ifndef _STLP_MSVC
  2481 // I couldn't get this to work with VC++
  2482 template<class _CharT,class _Alloc>
  2483 void
  2484 _Rope_rotate(_Rope_iterator<_CharT,_Alloc> __first,
  2485              _Rope_iterator<_CharT,_Alloc> __middle,
  2486              _Rope_iterator<_CharT,_Alloc> __last);
  2487 
  2488 #if !defined(__GNUC__)
  2489 // Appears to confuse g++
  2490 inline void rotate(_Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __first,
  2491                    _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __middle,
  2492                    _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __last) {
  2493   _Rope_rotate(__first, __middle, __last);
  2494 }
  2495 #endif
  2496 
  2497 #endif
  2498 
  2499 template <class _CharT, class _Alloc>
  2500 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const
  2501 {
  2502   if (_M_current_valid) {
  2503 	return _M_current;
  2504   } else {
  2505     return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
  2506   }
  2507 }
  2508 _STLP_END_NAMESPACE
  2509 
  2510 # if !defined (_STLP_LINK_TIME_INSTANTIATION)
  2511 #  include <stl/_rope.c>
  2512 # endif
  2513 
  2514 # endif /* _STLP_INTERNAL_ROPE_H */
  2515 
  2516 // Local Variables:
  2517 // mode:C++
  2518 // End: