3 * Copyright (c) 1996,1997
4 * Silicon Graphics Computer Systems, Inc.
7 * Moscow Center for SPARC Technology
12 * This material is provided "as is", with absolutely no warranty expressed
13 * or implied. Any use is at your own risk.
15 * Permission to use or copy this software for any purpose is hereby granted
16 * without fee, provided the above notices are retained on all copies.
17 * Permission to modify the code and to distribute modified code is granted,
18 * provided the above notices are retained, and a notice that the code was
19 * modified is included with the above copyright notice.
23 /* NOTE: This is an internal header file, included by other STL headers.
24 * You should not attempt to use it directly.
27 // rope<_CharT,_Alloc> is a sequence of _CharT.
28 // Ropes appear to be mutable, but update operations
29 // really copy enough of the data structure to leave the original
30 // valid. Thus ropes can be logically copied by just copying
33 #ifndef _STLP_INTERNAL_ROPE_H
34 #define _STLP_INTERNAL_ROPE_H
36 #ifndef _STLP_INTERNAL_ALGOBASE_H
37 # include <stl/_algobase.h>
44 #ifndef _STLP_INTERNAL_ALLOC_H
45 # include <stl/_alloc.h>
48 #ifndef _STLP_INTERNAL_ITERATOR_H
49 # include <stl/_iterator.h>
52 #ifndef _STLP_INTERNAL_ALGO_H
53 # include <stl/_algo.h>
56 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
57 # include <stl/_function_base.h>
60 #ifndef _STLP_INTERNAL_NUMERIC_H
61 # include <stl/_numeric.h>
64 #ifndef _STLP_INTERNAL_HASH_FUN_H
65 # include <stl/_hash_fun.h>
68 #ifndef _STLP_CHAR_TRAITS_H
69 # include <stl/char_traits.h>
72 #ifndef _STLP_INTERNAL_THREADS_H
73 # include <stl/_threads.h>
76 #ifdef _STLP_SGI_THREADS
80 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
81 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
82 #elif defined(__MRC__)||defined(__SC__)
83 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
85 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
90 // First a lot of forward declarations. The standard seems to require
91 // much stricter "declaration before use" than many of the implementations
// Forward declarations of the rope class template, its tree-node
// representations, iterators, and proxy types defined later in this header.
93 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
94 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
95 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
96 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
97 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
98 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
99 template<class _CharT, class _Alloc> class _Rope_iterator;
100 template<class _CharT, class _Alloc> class _Rope_const_iterator;
101 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
102 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
104 _STLP_MOVE_TO_PRIV_NAMESPACE
106 // Some helpers, so we can use the power algorithm on ropes.
107 // See below for why this isn't local to the implementation.
109 // This uses a nonstandard refcount convention.
110 // The result has refcount 0.
// Binary functor yielding the concatenation of two ropes, used with the
// generic repeated-doubling ("power") algorithm.
111 template<class _CharT, class _Alloc>
112 struct _Rope_Concat_fn
113 : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
114 rope<_CharT,_Alloc> > {
115 rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
116 const rope<_CharT,_Alloc>& __y) {
// NOTE(review): the operator() body and the struct's closing braces are
// not visible in this excerpt of the file.
// Identity element for rope concatenation: the empty rope.
121 template <class _CharT, class _Alloc>
124 __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
125 { return rope<_CharT,_Alloc>(); }
127 _STLP_MOVE_TO_STD_NAMESPACE
130 template <class _CharT>
// Overload selected when _CharT is an integral type.
// NOTE(review): the function body is not visible in this excerpt.
131 inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
134 template <class _CharT>
135 inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
136 { _STLP_STD::_Construct(__p); }
138 template <class _CharT>
// Construct a "null" (terminating) character at __p, dispatching at compile
// time on whether _CharT is integral.
139 inline void _S_construct_null(_CharT *__p) {
140 typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
141 _S_construct_null_aux(__p, _Char_Is_Integral());
144 // char_producers are logically functions that generate a section of
145 // a string. These can be converted to ropes. The resulting rope
146 // invokes the char_producer on demand. This allows, for example,
147 // files to be viewed as ropes without reading the entire file.
148 template <class _CharT>
// Abstract interface: derived classes implement operator() to fill
// __buffer with __len characters starting at __start_pos.
149 class char_producer {
151 virtual ~char_producer() {}
152 virtual void operator()(size_t __start_pos, size_t __len,
153 _CharT* __buffer) = 0;
154 // Buffer should really be an arbitrary output iterator.
155 // That way we could flatten directly into an ostream, etc.
156 // This is thoroughly impossible, since iterator types don't
157 // have runtime descriptions.
162 // Sequence must provide an append operation that appends an
163 // array to the sequence. Sequence buffers are useful only if
164 // appending an entire array is cheaper than appending element by element.
165 // This is true for many string representations.
166 // This should perhaps inherit from ostream<sequence::value_type>
167 // and be implemented correspondingly, so that they can be used
168 // for formatted. For the sake of portability, we don't do this yet.
170 // For now, sequence buffers behave as output iterators. But they also
171 // behave a little like basic_ostringstream<sequence::value_type> and a
172 // little like containers.
174 template<class _Sequence
175 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
176 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
177 , size_t _Buf_sz = 100
178 # if defined(__sgi) && !defined(__GNUC__)
179 # define __TYPEDEF_WORKAROUND
180 ,class _V = typename _Sequence::value_type
182 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
184 // The 3rd parameter works around a common compiler bug.
185 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
187 # ifndef __TYPEDEF_WORKAROUND
188 typedef typename _Sequence::value_type value_type;
189 typedef sequence_buffer<_Sequence
190 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
191 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
194 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
196 enum { _Buf_sz = 100};
197 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
199 # else /* __TYPEDEF_WORKAROUND */
200 typedef _V value_type;
201 typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
202 # endif /* __TYPEDEF_WORKAROUND */
// _M_prefix points at the underlying sequence; _M_buffer holds pending
// elements that have not yet been appended to it.
// NOTE(review): several declarations (e.g. the flush() signature and the
// _M_buf_count member) are missing from this excerpt.
204 _Sequence* _M_prefix;
205 value_type _M_buffer[_Buf_sz];
209 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
// Destructor flushes any buffered elements into the sequence.
212 ~sequence_buffer() { flush(); }
213 sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
214 sequence_buffer(const _Self& __x) {
215 _M_prefix = __x._M_prefix;
216 _M_buf_count = __x._M_buf_count;
217 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
219 sequence_buffer(_Self& __x) {
221 _M_prefix = __x._M_prefix;
224 sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
225 _Self& operator= (_Self& __x) {
227 _M_prefix = __x._M_prefix;
231 _Self& operator= (const _Self& __x) {
232 _M_prefix = __x._M_prefix;
233 _M_buf_count = __x._M_buf_count;
234 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
// Buffer a single element; presumably flushes first when the buffer is
// full — the overflow branch is not visible in this excerpt.
237 void push_back(value_type __x) {
238 if (_M_buf_count < _Buf_sz) {
239 _M_buffer[_M_buf_count] = __x;
// Append an array of elements: copy into the buffer when it fits,
// otherwise (empty buffer) append directly to the underlying sequence.
247 void append(const value_type *__s, size_t __len) {
248 if (__len + _M_buf_count <= _Buf_sz) {
249 size_t __i = _M_buf_count;
251 for (; __j < __len; __i++, __j++) {
252 _M_buffer[__i] = __s[__j];
254 _M_buf_count += __len;
255 } else if (0 == _M_buf_count) {
256 _M_prefix->append(__s, __s + __len);
// ostream-like convenience interface.
262 _Self& write(const value_type *__s, size_t __len) {
266 _Self& put(value_type __x) {
270 _Self& operator=(const value_type& __rhs) {
// Output-iterator protocol: dereference and increment are no-ops on *this.
274 _Self& operator*() { return *this; }
275 _Self& operator++() { return *this; }
276 _Self& operator++(int) { return *this; }
279 // The following should be treated as private, at least for now.
280 template<class _CharT>
// Consumer counterpart of char_producer: operator() receives successive
// chunks of rope data and returns a bool (presumably false to stop the
// traversal — confirm against _S_apply_to_pieces).
281 class _Rope_char_consumer {
282 #if !defined (_STLP_MEMBER_TEMPLATES)
284 //Without member templates we have to use run-time parameterization.
285 // The symmetry with char_producer is accidental and temporary.
286 virtual ~_Rope_char_consumer() {}
287 virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
292 // What follows should really be local to rope. Unfortunately,
293 // that doesn't work, since it makes it impossible to define generic
294 // equality on rope iterators. According to the draft standard, the
295 // template parameters for such an equality operator cannot be inferred
296 // from the occurrence of a member class as a parameter.
297 // (SGI compilers in fact allow this, but the __result wouldn't be
299 // Similarly, some of the static member functions are member functions
300 // only to avoid polluting the global namespace, and to circumvent
301 // restrictions on type inference for template functions.
305 // The internal data structure for representing a rope. This is
306 // private to the implementation. A rope is really just a pointer
309 // A few basic functions for manipulating this data structure
310 // are members of _RopeRep. Most of the more complex algorithms
311 // are implemented as rope members.
313 // Some of the static member functions of _RopeRep have identically
314 // named functions in rope that simply invoke the _RopeRep versions.
317 template<class _CharT, class _Alloc>
// Reference-counted base node of the rope tree. Every rope is a pointer to
// one of the four _Tag variants derived from this type.
319 : public _Refcount_Base
321 typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
326 // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
327 // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
328 // but this addition causes the sunpro compiler to complain about
329 // multiple declarations during the initialization of "_S_min_len".
330 // Changed to be a fixed value and the sunpro compiler appears to
333 # define __ROPE_MAX_DEPTH 45
334 # define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
335 enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
336 enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
337 // Apparently needed by VC++
338 // The data fields of leaves are allocated with some
339 // extra space, to accommodate future growth and for basic
340 // character types, to hold a trailing eos character.
341 enum { _S_alloc_granularity = 8 };
344 bool _M_is_balanced:8;
346 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
347 typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
349 allocator_type get_allocator() const { return allocator_type(_M_size); }
351 unsigned char _M_depth;
352 _CharT* _STLP_VOLATILE _M_c_string;
353 _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
355 # ifdef _STLP_NO_ARROW_OPERATOR
356 _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
359 /* Flattened version of string, if needed. */
361 /* If it's not 0, then the memory is owned */
363 /* In the case of a leaf, this may point to */
364 /* the same memory as the data field. */
// Node constructor: starts with refcount 1 (via _Refcount_Base) and no
// cached flat c-string.
365 _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
366 allocator_type __a) :
368 _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
// Compile-time classification: is _CharT a basic character type?
371 typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
372 # ifdef _STLP_HAS_WCHAR_T
373 typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
375 typedef __false_type _IsWCharT;
378 typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;
381 /* Please tell why this code is necessary if you uncomment it.
382 * Problem with it is that rope implementation expect that _S_rounded_up_size(n)
383 * returns a size > n in order to store the terminating null character. When
384 * instantiation type is not a char or wchar_t this is not guaranteed, resulting in
// Round a requested size up to the allocation granularity; the basic-char
// variant always reserves at least one extra slot for a terminating null.
387 static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
388 // Allow slop for in-place expansion.
389 return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
392 static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
393 // Allow slop for in-place expansion.
394 return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
397 // fbp : moved from RopeLeaf
398 static size_t _S_rounded_up_size(size_t __n)
399 //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
400 { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
// Destroy and deallocate a character buffer of __len characters that was
// allocated with the padded _S_rounded_up_size.
402 static void _S_free_string( _CharT* __s, size_t __len,
403 allocator_type __a) {
404 _STLP_STD::_Destroy_Range(__s, __s + __len);
405 // This has to be a static member, so this gets a bit messy
406 # ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
407 __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
409 __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
413 // Deallocate data section of a leaf.
414 // This shouldn't be a member function.
415 // But its hard to do anything else at the
416 // moment, because it's templatized w.r.t.
418 // Does nothing if __GC is defined.
419 void _M_free_c_string();
421 // Deallocate t. Assumes t is not 0.
// Drop one reference; free the whole subtree when the count hits zero.
422 void _M_unref_nonnil() {
423 if (_M_decr() == 0) _M_free_tree();
425 void _M_ref_nonnil() {
// Null-tolerant refcount helpers.
428 static void _S_unref(_Self* __t) {
430 __t->_M_unref_nonnil();
433 static void _S_ref(_Self* __t) {
434 if (0 != __t) __t->_M_incr();
436 //static void _S_free_if_unref(_Self* __t) {
437 // if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
441 template<class _CharT, class _Alloc>
// Leaf node: owns a flat character array holding a contiguous piece of
// the rope's contents.
442 struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
444 _CharT* _M_data; /* Not necessarily 0 terminated. */
445 /* The allocated size is */
446 /* _S_rounded_up_size(size), except */
447 /* in the GC case, in which it */
448 /* doesn't matter. */
450 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
451 typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
// For basic character types the leaf data doubles as the cached flat
// c-string; for other types no cache is set up.
452 void _M_init(__true_type const& /*_IsBasicCharType*/) {
453 this->_M_c_string = _M_data;
455 void _M_init(__false_type const& /*_IsBasicCharType*/) {}
458 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
459 typedef typename _RopeRep::allocator_type allocator_type;
461 _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
462 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
464 _STLP_ASSERT(_p_size > 0)
465 _M_init(_IsBasicCharType());
468 # ifdef _STLP_NO_ARROW_OPERATOR
470 _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
473 // The constructor assumes that d has been allocated with
474 // the proper allocator and the properly padded size.
475 // In contrast, the destructor deallocates the data:
477 if (_M_data != this->_M_c_string) {
478 this->_M_free_c_string();
480 _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
484 template<class _CharT, class _Alloc>
// Interior node: the concatenation of two subtrees. Size is the sum of
// the children's sizes; depth is one more than the deeper child.
485 struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
487 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
492 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
493 typedef typename _RopeRep::allocator_type allocator_type;
494 _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
495 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
496 (max)(__l->_M_depth, __r->_M_depth) + 1, false,
497 __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
499 # ifdef _STLP_NO_ARROW_OPERATOR
500 _Rope_RopeConcatenation() {}
501 _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
// Destructor releases the cached c-string and one reference on each child.
504 ~_Rope_RopeConcatenation() {
505 this->_M_free_c_string();
506 _M_left->_M_unref_nonnil();
507 _M_right->_M_unref_nonnil();
511 template <class _CharT, class _Alloc>
// Function node: characters are generated on demand by a char_producer
// instead of being stored.
512 struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
514 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
516 char_producer<_CharT>* _M_fn;
518 * Char_producer is owned by the
519 * rope and should be explicitly
520 * deleted when the rope becomes
// When true, the destructor deletes _M_fn (ownership transferred to the rope).
523 bool _M_delete_when_done;
524 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
525 typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
526 # ifdef _STLP_NO_ARROW_OPERATOR
527 _Rope_RopeFunction() {}
528 _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
531 _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
532 bool __d, allocator_type __a)
533 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
534 , _M_delete_when_done(__d)
535 { _STLP_ASSERT(_p_size > 0) }
537 ~_Rope_RopeFunction() {
538 this->_M_free_c_string();
539 if (_M_delete_when_done) {
546 * Substring results are usually represented using just
547 * concatenation nodes. But in the case of very long flat ropes
548 * or ropes with a functional representation that isn't practical.
549 * In that case, we represent the __result as a special case of
550 * RopeFunction, whose char_producer points back to the rope itself.
551 * In all cases except repeated substring operations and
552 * deallocation, we treat the __result as a RopeFunction.
554 template<class _CharT, class _Alloc>
// Substring node: a RopeFunction that is its own char_producer, reading
// the characters out of a base node starting at offset _M_start.
555 struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
557 // XXX this whole class should be rewritten.
558 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
559 _RopeRep *_M_base; // not 0
// Produce __req_len characters of the substring starting at __start_pos,
// delegating to the base node's representation.
561 /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
563 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
564 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
565 switch (_M_base->_M_tag) {
566 case _RopeRep::_S_function:
567 case _RopeRep::_S_substringfn:
// Function-backed base: forward the (shifted) request to its producer.
569 char_producer<_CharT>* __fn =
570 __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
571 _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
572 _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
573 (*__fn)(__start_pos + _M_start, __req_len, __buffer);
576 case _RopeRep::_S_leaf:
// Leaf base: copy directly out of the flat data array.
579 __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
580 _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
589 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
590 typedef typename _RopeRep::allocator_type allocator_type;
// Takes a reference on the base node; the tag is rewritten to
// _S_substringfn after the RopeFunction base sets _S_function.
592 _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
593 : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
594 _M_base(__b), _M_start(__s) {
595 _STLP_ASSERT(__l > 0)
596 _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
597 _M_base->_M_ref_nonnil();
598 this->_M_tag = _RopeRep::_S_substringfn;
600 virtual ~_Rope_RopeSubstring()
601 { _M_base->_M_unref_nonnil(); }
605 * Self-destructing pointers to Rope_rep.
606 * These are not conventional smart pointers. Their
607 * only purpose in life is to ensure that unref is called
608 * on the pointer either at normal exit or if an exception
609 * is raised. It is the caller's responsibility to
610 * adjust reference counts when these pointers are initialized
611 * or assigned to. (This convention significantly reduces
612 * the number of potentially expensive reference count
615 template<class _CharT, class _Alloc>
// RAII guard: releases one reference on the held node when destroyed.
616 struct _Rope_self_destruct_ptr {
617 _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
618 ~_Rope_self_destruct_ptr()
619 { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
620 # ifdef _STLP_USE_EXCEPTIONS
621 _Rope_self_destruct_ptr() : _M_ptr(0) {}
623 _Rope_self_destruct_ptr() {}
625 _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
626 _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
627 _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
628 operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
// Plain pointer assignment: does NOT adjust reference counts (see above).
629 _Rope_self_destruct_ptr<_CharT, _Alloc>&
630 operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
631 { _M_ptr = __x; return *this; }
635 * Dereferencing a nonconst iterator has to return something
636 * that behaves almost like a reference. It's not possible to
637 * return an actual reference since assignment requires extra
638 * work. And we would get into the same problems as with the
639 * CD2 version of basic_string.
641 template<class _CharT, class _Alloc>
// Proxy returned by non-const element access: records the rope and the
// position, and performs the copy-on-write update on assignment.
642 class _Rope_char_ref_proxy {
643 typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
644 friend class rope<_CharT,_Alloc>;
645 friend class _Rope_iterator<_CharT,_Alloc>;
646 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
647 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
648 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
649 typedef rope<_CharT,_Alloc> _My_rope;
// _M_current_valid tells whether the cached character value may be used.
652 bool _M_current_valid;
653 _My_rope* _M_root; // The whole rope.
655 _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
656 _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
657 _Rope_char_ref_proxy(const _Self& __x) :
658 _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
659 // Don't preserve cache if the reference can outlive the
660 // expression. We claim that's not possible without calling
661 // a copy constructor or generating reference to a proxy
662 // reference. We declare the latter to have undefined semantics.
663 _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
664 : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
665 inline operator _CharT () const;
666 _Self& operator= (_CharT __c);
667 _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
// Proxy-to-proxy assignment copies the character value, not the identity.
668 _Self& operator= (const _Self& __c) {
669 return operator=((_CharT)__c);
673 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// Generic swap over the proxy reference type (takes the proxies by value,
// since they are references in disguise).
674 template<class _CharT, class __Alloc>
675 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
676 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
682 // There is no really acceptable way to handle this. The default
683 // definition of swap doesn't work for proxy references.
684 // It can't really be made to work, even with ugly hacks, since
685 // the only unusual operation it uses is the copy constructor, which
686 // is needed for other purposes. We provide a macro for
687 // full specializations, and instantiate the most common case.
688 # define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
689 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
690 _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
691 _CharT __tmp = __a; \
696 _ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
698 # ifndef _STLP_NO_WCHAR_T
699 _ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
702 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
704 template<class _CharT, class _Alloc>
// Proxy for a pointer into a rope, produced by taking the address of a
// _Rope_char_ref_proxy; dereferencing yields a new reference proxy.
705 class _Rope_char_ptr_proxy {
706 // XXX this class should be rewritten.
708 typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
709 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
711 rope<_CharT,_Alloc>* _M_root; // The whole rope.
713 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
714 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
715 _Rope_char_ptr_proxy(const _Self& __x)
716 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
717 _Rope_char_ptr_proxy() {}
// Only a null raw pointer may be converted; anything else is a usage error.
718 _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
719 _STLP_ASSERT(0 == __x)
721 _Self& operator= (const _Self& __x) {
723 _M_root = __x._M_root;
727 _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
728 return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
735 * Unlike in the C version, we cache only part of the stack
736 * for rope iterators, since they must be efficiently copyable.
737 * When we run out of cache, we have to reconstruct the iterator
739 * Pointers from iterators are not included in reference counts.
740 * Iterators are assumed to be thread private. Ropes can
743 template<class _CharT, class _Alloc>
// Common state for rope iterators: current position, a small cache of the
// tree path to the current leaf, and a buffer holding nearby characters.
744 class _Rope_iterator_base
745 /* : public random_access_iterator<_CharT, ptrdiff_t> */
747 friend class rope<_CharT,_Alloc>;
748 typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
749 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
751 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
753 enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
754 enum { _S_iterator_buf_len = 15 };
755 size_t _M_current_pos;
758 // Starting position for current leaf
760 // Buffer possibly containing current char.
761 _CharT* _M_buf_start;
762 // Pointer to current char in buffer, != 0 ==> buffer valid.
764 // One past __last valid char in buffer.
767 // What follows is the path cache. We go out of our
768 // way to make this compact.
769 // Path_end contains the bottom section of the path from
770 // the root to the current leaf.
772 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
773 _RopeRep const*_M_data[4];
775 _RopeRep const*_M_data[_S_path_cache_len];
778 // Last valid __pos in path_end;
779 // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
780 // point to concatenation nodes.
782 // (_M_path_directions >> __i) & 1 is 1
783 // if we got from _M_path_end[leaf_index - __i - 1]
784 // to _M_path_end[leaf_index - __i] by going to the
785 // __right. Assumes path_cache_len <= 9.
786 unsigned char _M_path_directions;
787 // Short buffer for surrounding chars.
788 // This is useful primarily for
789 // RopeFunctions. We put the buffer
790 // here to avoid locking in the
791 // multithreaded case.
792 // The cached path is generally assumed to be valid
793 // only if the buffer is valid.
795 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
798 _CharT _M_data[_S_iterator_buf_len];
802 // Set buffer contents given path cache.
803 static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
804 // Set buffer contents and path cache.
805 static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
806 // As above, but assumes path cache is valid for previous posn.
807 static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
808 _Rope_iterator_base() {}
809 _Rope_iterator_base(_RopeRep* __root, size_t __pos)
810 : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
811 void _M_incr(size_t __n);
812 void _M_decr(size_t __n);
814 size_t index() const { return _M_current_pos; }
// Re-point the buffer pointers at our own _M_tmp_buf copy when the source
// iterator's buffer pointers referred to its internal temporary buffer.
815 void _M_copy_buf(const _Self& __x) {
816 _M_tmp_buf = __x._M_tmp_buf;
817 if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
818 _M_buf_start = _M_tmp_buf._M_data;
819 _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
820 _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
824 _Rope_iterator_base(const _Self& __x)
825 : _M_current_pos(__x._M_current_pos), _M_root(__x._M_root),
826 _M_buf_start(__x._M_buf_start), _M_buf_ptr(__x._M_buf_ptr), _M_path_end(__x._M_path_end),
827 _M_leaf_index(__x._M_leaf_index), _M_path_directions(__x._M_path_directions) {
828 if (0 != __x._M_buf_ptr) {
832 _Self& operator = (const _Self& __x) {
833 _M_current_pos = __x._M_current_pos;
834 _M_root = __x._M_root;
835 _M_buf_start = __x._M_buf_start;
836 _M_buf_ptr = __x._M_buf_ptr;
837 _M_path_end = __x._M_path_end;
838 _M_leaf_index = __x._M_leaf_index;
839 _M_path_directions = __x._M_path_directions;
840 if (0 != __x._M_buf_ptr) {
847 template<class _CharT, class _Alloc> class _Rope_iterator;
849 template<class _CharT, class _Alloc>
// Read-only random-access iterator over a rope. Dereference yields a
// value, not a reference (see the typedef comment below).
850 class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
851 friend class rope<_CharT,_Alloc>;
852 typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
853 typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
856 # ifndef _STLP_HAS_NO_NAMESPACES
857 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
858 // The one from the base class may not be directly visible.
860 _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
861 _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
862 // Only nonconst iterators modify root ref count
865 typedef _CharT reference; // Really a value. Returning a reference
866 // Would be a mess, since it would have
867 // to be included in refcount.
868 typedef const _CharT* pointer;
869 typedef _CharT value_type;
870 typedef ptrdiff_t difference_type;
871 typedef random_access_iterator_tag iterator_category;
874 _Rope_const_iterator() {}
875 _Rope_const_iterator(const _Self& __x) :
876 _Rope_iterator_base<_CharT,_Alloc>(__x) { }
877 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
878 _Rope_iterator_base<_CharT,_Alloc>(__x) {}
879 _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
880 _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
881 _Self& operator= (const _Self& __x) {
882 _Base::operator=(__x);
// Dereference: (re)build the character cache on demand, then read from it.
885 reference operator*() {
886 if (0 == this->_M_buf_ptr)
887 #if !defined (__DMC__)
890 { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
892 return *(this->_M_buf_ptr);
// Fast path: step within the cached buffer when possible.
894 _Self& operator++() {
896 if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
897 this->_M_buf_ptr = __next;
898 ++this->_M_current_pos;
904 _Self& operator+=(ptrdiff_t __n) {
912 _Self& operator--() {
916 _Self& operator-=(ptrdiff_t __n) {
924 _Self operator++(int) {
925 size_t __old_pos = this->_M_current_pos;
927 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
928 // This makes a subsequent dereference expensive.
929 // Perhaps we should instead copy the iterator
930 // if it has a valid cache?
932 _Self operator--(int) {
933 size_t __old_pos = this->_M_current_pos;
935 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
937 inline reference operator[](size_t __n);
940 template<class _CharT, class _Alloc>
// Mutable rope iterator. Dereference returns a _Rope_char_ref_proxy; the
// iterator holds a counted reference on the root so that modifications to
// the rope can be detected.
941 class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
942 friend class rope<_CharT,_Alloc>;
943 typedef _Rope_iterator<_CharT, _Alloc> _Self;
944 typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
945 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
948 rope<_CharT,_Alloc>* _M_root_rope;
949 // root is treated as a cached version of this,
950 // and is used to detect changes to the underlying
952 // Root is included in the reference count.
953 // This is necessary so that we can detect changes reliably.
954 // Unfortunately, it requires careful bookkeeping for the
956 _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
960 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
961 typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
962 typedef _CharT value_type;
963 typedef ptrdiff_t difference_type;
964 typedef random_access_iterator_tag iterator_category;
966 ~_Rope_iterator() { //*TY 5/6/00 - added dtor to balance reference count
967 _RopeRep::_S_unref(this->_M_root);
970 rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
972 this->_M_root = 0; // Needed for reference counting.
974 _Rope_iterator(const _Self& __x) :
975 _Rope_iterator_base<_CharT,_Alloc>(__x) {
976 _M_root_rope = __x._M_root_rope;
977 _RopeRep::_S_ref(this->_M_root);
979 _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
// Assignment: take a reference on the new root before dropping the old
// one, so self-assignment stays safe.
980 _Self& operator= (const _Self& __x) {
981 _RopeRep* __old = this->_M_root;
982 _RopeRep::_S_ref(__x._M_root);
983 _Base::operator=(__x);
984 _M_root_rope = __x._M_root_rope;
985 _RopeRep::_S_unref(__old);
// Dereference: produce a proxy; pass the cached character when available.
988 reference operator*() {
990 if (0 == this->_M_buf_ptr) {
991 return reference(_M_root_rope, this->_M_current_pos);
993 return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
996 _Self& operator++() {
// Negative offsets are handled by stepping in the opposite direction.
1000 _Self& operator+=(ptrdiff_t __n) {
1004 this->_M_decr(-__n);
1008 _Self& operator--() {
1012 _Self& operator-=(ptrdiff_t __n) {
1016 this->_M_incr(-__n);
1020 _Self operator++(int) {
1021 size_t __old_pos = this->_M_current_pos;
1023 return _Self(_M_root_rope, __old_pos);
1025 _Self operator--(int) {
1026 size_t __old_pos = this->_M_current_pos;
1028 return _Self(_M_root_rope, __old_pos);
1030 reference operator[](ptrdiff_t __n) {
1031 return reference(_M_root_rope, this->_M_current_pos + __n);
# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Legacy HP-STL iterator query protocol: the pre-iterator_traits dispatch
// scheme asks for an iterator's category / value type / distance type via
// overloaded free functions. Each overload below answers for the rope
// iterators; the pointer-returning ones use a null result purely as a
// type carrier.
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&)
{ return random_access_iterator_tag(); }

template <class _CharT, class _Alloc>
inline _CharT*
value_type(const _Rope_iterator<_CharT,_Alloc>&)
{ return 0; }

template <class _CharT, class _Alloc>
inline ptrdiff_t*
distance_type(const _Rope_iterator<_CharT,_Alloc>&)
{ return 0; }

template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&)
{ return random_access_iterator_tag(); }

template <class _CharT, class _Alloc>
inline _CharT*
value_type(const _Rope_const_iterator<_CharT,_Alloc>&)
{ return 0; }

template <class _CharT, class _Alloc>
inline ptrdiff_t*
distance_type(const _Rope_const_iterator<_CharT,_Alloc>&)
{ return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
// Forward declaration: walk the leaves of the tree rooted at __r and feed
// each character chunk in [__begin, __end) to the consumer functor __c.
// Returns false as soon as the consumer asks to stop.
1052 template <class _CharT, class _Alloc, class _CharConsumer>
1053 bool _S_apply_to_pieces(_CharConsumer& __c,
1054 _Rope_RopeRep<_CharT, _Alloc> *__r,
1055 size_t __begin, size_t __end);
1056 // begin and end are assumed to be in range.
// ---- rope<_CharT,_Alloc>: class head, typedefs, friends and the static
// tree primitives.  NOTE(review): the `class rope` line itself and many
// closing braces are missing from this extract; code left byte-identical.
1058 template <class _CharT, class _Alloc>
1060 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
1061 : public __stlport_class<rope<_CharT, _Alloc> >
1064 typedef rope<_CharT,_Alloc> _Self;
// Standard container typedefs.  reference/pointer are proxy classes
// because plain references into a shared tree would not be safe to write.
1066 typedef _CharT value_type;
1067 typedef ptrdiff_t difference_type;
1068 typedef size_t size_type;
1069 typedef _CharT const_reference;
1070 typedef const _CharT* const_pointer;
1071 typedef _Rope_iterator<_CharT,_Alloc> iterator;
1072 typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1073 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1074 typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
// The iterator/proxy helper classes need access to the private tree.
1076 friend class _Rope_iterator<_CharT,_Alloc>;
1077 friend class _Rope_const_iterator<_CharT,_Alloc>;
1078 friend struct _Rope_RopeRep<_CharT,_Alloc>;
1079 friend class _Rope_iterator_base<_CharT,_Alloc>;
1080 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1081 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1082 friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1084 _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1087 typedef _CharT* _Cstrptr;
1089 static _CharT _S_empty_c_str[1];
// Threshold below which substring/concat operations copy characters
// instead of building extra tree nodes.
1091 enum { _S_copy_max = 23 };
1092 // For strings shorter than _S_copy_max, we copy to
1095 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1096 typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
1099 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1100 typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1103 // The only data member of a rope:
// _M_tree_ptr bundles the allocator with the root pointer (empty-base
// style); a null root represents the empty rope.
1104 _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
1107 allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
// Concrete node kinds of the tree representation.
1110 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1111 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1112 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1113 typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1115 // Retrieve a character at the indicated position.
1116 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1118 // Obtain a pointer to the character at the indicated position.
1119 // The pointer can be used to change the character.
1120 // If such a pointer cannot be produced, as is frequently the
1121 // case, 0 is returned instead.
1122 // (Returns nonzero only if all nodes in the path have a refcount
1124 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
// Thin refcount wrappers (forward to the node's own _S_ref/_S_unref).
1126 static void _S_unref(_RopeRep* __t) {
1127 _RopeRep::_S_unref(__t);
1129 static void _S_ref(_RopeRep* __t) {
1130 _RopeRep::_S_ref(__t);
// RAII pointer that unrefs its node on scope exit (exception safety).
1133 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1135 // _Result is counted in refcount.
1136 static _RopeRep* _S_substring(_RopeRep* __base,
1137 size_t __start, size_t __endp1);
1139 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1140 const _CharT* __iter, size_t __slen);
1141 // Concatenate rope and char ptr, copying __s.
1142 // Should really take an arbitrary iterator.
1143 // Result is counted in refcount.
1144 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1145 const _CharT* __iter, size_t __slen);
1146 // As above, but one reference to __r is about to be
1147 // destroyed. Thus the pieces may be recycled if all
1148 // relevent reference counts are 1.
1150 // General concatenation on _RopeRep. _Result
1151 // has refcount of 1. Adjusts argument refcounts.
1152 static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
// ---- Node factory helpers: allocate raw node storage with the rope's
// allocator, then placement-new the node.  The _STLP_UNWIND clauses
// deallocate on a throwing constructor (manual exception safety).
1155 #if defined (_STLP_MEMBER_TEMPLATES)
1156 template <class _CharConsumer>
1158 typedef _Rope_char_consumer<_CharT> _CharConsumer;
// Feed characters [__begin, __end) to __c via the free _S_apply_to_pieces.
1160 void apply_to_pieces(size_t __begin, size_t __end,
1161 _CharConsumer& __c) const
1162 { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
// Leaf buffers are over-allocated (room for a trailing null etc.); the
// node type knows the exact rounding rule.
1166 static size_t _S_rounded_up_size(size_t __n)
1167 { return _RopeRep::_S_rounded_up_size(__n); }
1169 // Allocate and construct a RopeLeaf using the supplied allocator
1170 // Takes ownership of s instead of copying.
1171 static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
1172 size_t _p_size, allocator_type __a) {
1173 _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1174 _RopeLeaf).allocate(1);
1176 _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1178 _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1179 _RopeLeaf).deallocate(__space, 1))
// Build an interior concatenation node over two existing subtrees.
1183 static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
1184 allocator_type __a) {
1185 _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1186 _RopeConcatenation).allocate(1);
1187 return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
// Build a lazy node whose characters come from a char_producer callback.
1190 static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1191 size_t _p_size, bool __d, allocator_type __a) {
1192 _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1193 _RopeFunction).allocate(1);
1194 return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
// Build a node representing a substring view of another tree.
1197 static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1198 size_t __l, allocator_type __a) {
1199 _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1200 _RopeSubstring).allocate(1);
1201 return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
// Unlike _S_new_RopeLeaf, this COPIES the caller's characters into a
// freshly allocated buffer; an empty input yields a null (empty) tree.
1205 _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1206 size_t _p_size, allocator_type __a) {
1207 if (0 == _p_size) return 0;
1209 _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1211 _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
1212 _S_construct_null(__buf + _p_size);
1215 return _S_new_RopeLeaf(__buf, _p_size, __a);
1217 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1218 _STLP_RET_AFTER_THROW(0)
1222 // Concatenation of nonempty strings.
1223 // Always builds a concatenation node.
1224 // Rebalances if the result is too deep.
1225 // Result has refcount 1.
1226 // Does not increment left and right ref counts even though
1227 // they are referenced.
1229 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
// ---- Leaf-level concatenation helpers, balance predicates (Fibonacci
// length table), _M_reset, ternary compare, and the char-pointer ctors.
1231 // Concatenation helper functions
1233 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1234 const _CharT* __iter, size_t __slen);
1235 // Concatenate by copying leaf.
1236 // should take an arbitrary iterator
1237 // result has refcount 1.
1238 static _RopeLeaf* _S_destr_leaf_concat_char_iter
1239 (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1240 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1243 // A helper function for exponentiating strings.
1244 // This uses a nonstandard refcount convention.
1245 // The result has refcount 0.
1246 typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1247 #if !defined (__GNUC__) || (__GNUC__ < 3)
1250 friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
// strlen generalized to arbitrary character types via char_traits.
1254 static size_t _S_char_ptr_len(const _CharT* __s) {
1255 return char_traits<_CharT>::length(__s);
1258 public: /* for operators */
// Adopts __t without bumping its refcount (used by internal callers
// that hand over an already-counted reference).
1259 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1260 : _M_tree_ptr(__a, __t) { }
1262 // Copy __r to the _CharT buffer.
1263 // Returns __buffer + __r->_M_size._M_data.
1264 // Assumes that buffer is uninitialized.
1265 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1267 // Again, with explicit starting position and length.
1268 // Assumes that buffer is uninitialized.
1269 static _CharT* _S_flatten(_RopeRep* __r,
1270 size_t __start, size_t __len,
1273 // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
// _S_min_len[d] is the minimum length a balanced tree of depth d may
// have; the balance predicates below compare against it.
1275 static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
1277 static bool _S_is_balanced(_RopeRep* __r)
1278 { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1280 static bool _S_is_almost_balanced(_RopeRep* __r) {
1281 return (__r->_M_depth == 0 ||
1282 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
1285 static bool _S_is_roughly_balanced(_RopeRep* __r) {
1286 return (__r->_M_depth <= 1 ||
1287 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
1290 // Assumes the result is not empty.
1291 static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1292 _RopeRep* __right) {
1293 _RopeRep* __result = _S_concat_rep(__left, __right);
1294 if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1298 // The basic rebalancing operation. Logically copies the
1299 // rope. The result has refcount of 1. The client will
1300 // usually decrement the reference count of __r.
1301 // The result is within height 2 of balanced by the above
1303 static _RopeRep* _S_balance(_RopeRep* __r);
1305 // Add all unbalanced subtrees to the forest of balanceed trees.
1306 // Used only by balance.
1307 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1309 // Add __r to forest, assuming __r is already balanced.
1310 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1313 // Print to stdout, exposing structure
1314 static void _S_dump(_RopeRep* __r, int __indent = 0);
1317 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1318 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1320 void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
// Replace the root with __r, releasing the old root's reference.
1322 void _M_reset(_RopeRep* __r) {
1323 //if (__r != _M_tree_ptr._M_data) {
1324 _S_unref(_M_tree_ptr._M_data);
1325 _M_tree_ptr._M_data = __r;
// Empty rope <=> null root pointer.
1330 bool empty() const { return 0 == _M_tree_ptr._M_data; }
1332 // Comparison member function. This is public only for those
1333 // clients that need a ternary comparison. Others
1334 // should use the comparison operators below.
1335 int compare(const _Self& __y) const {
1336 return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
// Construct from a null-terminated string / from a (pointer, length) pair.
1339 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1340 : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
1343 rope(const _CharT* __s, size_t __len,
1344 const allocator_type& __a = allocator_type())
1345 : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
// ---- Range constructors and the fill constructor rope(n, c).  The fill
// ctor builds ceil(n/32)-fold repetitions by "exponentiating" a 32-char
// leaf with __power (repeated concatenation), then appends the remainder.
1348 // Should perhaps be templatized with respect to the iterator type
1349 // and use Sequence_buffer. (It should perhaps use sequence_buffer
1351 rope(const _CharT *__s, const _CharT *__e,
1352 const allocator_type& __a = allocator_type())
1353 : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
// Constructing from two rope iterators shares the underlying tree via a
// substring node instead of copying characters.
1356 rope(const const_iterator& __s, const const_iterator& __e,
1357 const allocator_type& __a = allocator_type())
1358 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1359 __e._M_current_pos))
1362 rope(const iterator& __s, const iterator& __e,
1363 const allocator_type& __a = allocator_type())
1364 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1365 __e._M_current_pos))
// Single-character rope: one leaf of length 1.
1368 rope(_CharT __c, const allocator_type& __a = allocator_type())
1369 : _M_tree_ptr(__a, (_RopeRep*)0) {
1370 _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1372 _Copy_Construct(__buf, __c);
1373 _S_construct_null(__buf + 1);
1376 _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1378 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
1381 rope(size_t __n, _CharT __c,
1382 const allocator_type& __a = allocator_type()):
1383 _M_tree_ptr(__a, (_RopeRep*)0) {
1387 rope<_CharT,_Alloc> __result;
1388 # define __exponentiate_threshold size_t(32)
1389 _RopeRep* __remainder;
1390 rope<_CharT,_Alloc> __remainder_rope;
1393 typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
// Split n into __exponent full 32-char blocks plus __rest characters.
1395 size_t __exponent = __n / __exponentiate_threshold;
1396 size_t __rest = __n % __exponentiate_threshold;
1400 _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1401 uninitialized_fill_n(__rest_buffer, __rest, __c);
1402 _S_construct_null(__rest_buffer + __rest);
1404 __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1406 _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1408 __remainder_rope._M_tree_ptr._M_data = __remainder;
1409 if (__exponent != 0) {
1410 _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1411 _RopeLeaf* __base_leaf;
1412 rope<_CharT,_Alloc> __base_rope;
1413 uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1414 _S_construct_null(__base_buffer + __exponentiate_threshold);
1416 __base_leaf = _S_new_RopeLeaf(__base_buffer,
1417 __exponentiate_threshold, __a);
1419 _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1420 __exponentiate_threshold, __a))
1421 __base_rope._M_tree_ptr._M_data = __base_leaf;
1422 if (1 == __exponent) {
1423 __result = __base_rope;
1424 // One each for base_rope and __result
1425 //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
// __power repeats the base rope __exponent times via _Concat_fn.
1427 __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
1429 if (0 != __remainder) {
1430 __result += __remainder_rope;
1433 __result = __remainder_rope;
// Steal __result's tree and add our own reference to it.
1435 _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1436 _M_tree_ptr._M_data->_M_ref_nonnil();
1437 # undef __exponentiate_threshold
1441 : _M_tree_ptr(__a, (_RopeRep*)0) {}
1443 // Construct a rope from a function that can compute its members
1444 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1445 const allocator_type& __a = allocator_type())
1446 : _M_tree_ptr(__a, (_RopeRep*)0) {
1447 _M_tree_ptr._M_data = (0 == __len) ?
1448 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1451 rope(const _Self& __x)
1452 : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
1453 _S_ref(_M_tree_ptr._M_data);
1456 rope(__move_source<_Self> __src)
1457 : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
1458 __src.get()._M_tree_ptr._M_data = 0;
1462 _S_unref(_M_tree_ptr._M_data);
1465 _Self& operator=(const _Self& __x) {
1466 _STLP_ASSERT(get_allocator() == __x.get_allocator())
1467 _S_ref(__x._M_tree_ptr._M_data);
1468 _M_reset(__x._M_tree_ptr._M_data);
1473 _S_unref(_M_tree_ptr._M_data);
1474 _M_tree_ptr._M_data = 0;
1476 void push_back(_CharT __x) {
1477 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
1481 _RopeRep* __old = _M_tree_ptr._M_data;
1482 _M_tree_ptr._M_data =
1483 _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1487 _CharT back() const {
1488 return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
1491 void push_front(_CharT __x) {
1492 _RopeRep* __old = _M_tree_ptr._M_data;
1494 _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
1496 _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1500 _STLP_UNWIND(_S_unref(__left))
1504 _RopeRep* __old = _M_tree_ptr._M_data;
1505 _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1509 _CharT front() const {
1510 return _S_fetch(_M_tree_ptr._M_data, 0);
1514 _RopeRep* __old = _M_tree_ptr._M_data;
1515 _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
1519 void copy(_CharT* __buffer) const {
1520 _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
1521 _S_flatten(_M_tree_ptr._M_data, __buffer);
1525 * This is the copy function from the standard, but
1526 * with the arguments reordered to make it consistent with the
1527 * rest of the interface.
1528 * Note that this guaranteed not to compile if the draft standard
1531 size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
1532 size_t _p_size = size();
1533 size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1535 _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
1536 _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1541 // Print to stdout, exposing structure. May be useful for
1542 // performance debugging.
1544 _S_dump(_M_tree_ptr._M_data);
1548 // Convert to 0 terminated string in new allocated memory.
1549 // Embedded 0s in the input do not terminate the copy.
1550 const _CharT* c_str() const;
1552 // As above, but also use the flattened representation as the
1553 // the new rope representation.
1554 const _CharT* replace_with_c_str();
1556 // Reclaim memory for the c_str generated flattened string.
1557 // Intentionally undocumented, since it's hard to say when this
1558 // is safe for multiple threads.
1559 void delete_c_str () {
1560 if (0 == _M_tree_ptr._M_data) return;
1561 if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1562 ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1563 _M_tree_ptr._M_data->_M_c_string) {
1564 // Representation shared
1567 _M_tree_ptr._M_data->_M_free_c_string();
1568 _M_tree_ptr._M_data->_M_c_string = 0;
1571 _CharT operator[] (size_type __pos) const {
1572 return _S_fetch(_M_tree_ptr._M_data, __pos);
1575 _CharT at(size_type __pos) const {
1576 if (__pos >= size()) _M_throw_out_of_range();
1577 return (*this)[__pos];
1580 const_iterator begin() const {
1581 return(const_iterator(_M_tree_ptr._M_data, 0));
1584 // An easy way to get a const iterator from a non-const container.
1585 const_iterator const_begin() const {
1586 return(const_iterator(_M_tree_ptr._M_data, 0));
1589 const_iterator end() const {
1590 return(const_iterator(_M_tree_ptr._M_data, size()));
1593 const_iterator const_end() const {
1594 return(const_iterator(_M_tree_ptr._M_data, size()));
1597 size_type size() const {
1598 return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1601 size_type length() const {
1605 size_type max_size() const {
1606 return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1607 // Guarantees that the result can be sufficiently
1608 // balanced. Longer ropes will probably still work,
1609 // but it's harder to make guarantees.
1612 const_reverse_iterator rbegin() const {
1613 return const_reverse_iterator(end());
1616 const_reverse_iterator const_rbegin() const {
1617 return const_reverse_iterator(end());
1620 const_reverse_iterator rend() const {
1621 return const_reverse_iterator(begin());
1624 const_reverse_iterator const_rend() const {
1625 return const_reverse_iterator(begin());
1627 // The symmetric cases are intentionally omitted, since they're presumed
1628 // to be less common, and we don't handle them as well.
1630 // The following should really be templatized.
1631 // The first argument should be an input iterator or
1632 // forward iterator with value_type _CharT.
1633 _Self& append(const _CharT* __iter, size_t __n) {
1634 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
1638 _Self& append(const _CharT* __c_string) {
1639 size_t __len = _S_char_ptr_len(__c_string);
1640 append(__c_string, __len);
1644 _Self& append(const _CharT* __s, const _CharT* __e) {
1645 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
1649 _Self& append(const_iterator __s, const_iterator __e) {
1650 _STLP_ASSERT(__s._M_root == __e._M_root)
1651 _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1652 _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
1653 _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
1657 _Self& append(_CharT __c) {
1658 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
1662 _Self& append() { return append(_CharT()); } // XXX why?
1664 _Self& append(const _Self& __y) {
1665 _STLP_ASSERT(__y.get_allocator() == get_allocator())
1666 _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
1670 _Self& append(size_t __n, _CharT __c) {
1671 rope<_CharT,_Alloc> __last(__n, __c);
1672 return append(__last);
// ---- swap, the static splice primitive `replace`, and the
// (position [, length]) insert/replace member families.  All of them are
// built on _S_substring + _S_concat_rep: split, splice, reassemble.
1675 void swap(_Self& __b) {
1676 _M_tree_ptr.swap(__b._M_tree_ptr);
1680 // Result is included in refcount.
// Splice: returns __old with [__pos1, __pos2) replaced by tree __r
// (pass __r == 0 to delete the range).  Purely functional: __old is
// not modified, the pieces are shared.
1681 static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1682 size_t __pos2, _RopeRep* __r) {
1683 if (0 == __old) { _S_ref(__r); return __r; }
1684 _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
1685 _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
1686 _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1690 __result = _S_concat_rep(__left, __right);
1692 _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1693 _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1694 __result = _S_concat_rep(__left_result, __right);
1697 _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
// Insert rope __r at position __p (no characters removed).
1701 void insert(size_t __p, const _Self& __r) {
1702 if (__p > size()) _M_throw_out_of_range();
1703 _STLP_ASSERT(get_allocator() == __r.get_allocator())
1704 _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
1707 void insert(size_t __p, size_t __n, _CharT __c) {
1708 rope<_CharT,_Alloc> __r(__n,__c);
// Insert __n characters from raw memory at position __p.
1712 void insert(size_t __p, const _CharT* __i, size_t __n) {
1713 if (__p > size()) _M_throw_out_of_range();
1714 _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1715 _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1716 _Self_destruct_ptr __left_result(
1717 _S_concat_char_iter(__left, __i, __n));
1718 // _S_ destr_concat_char_iter should be safe here.
1719 // But as it stands it's probably not a win, since __left
1720 // is likely to have additional references.
1721 _M_reset(_S_concat_rep(__left_result, __right));
1724 void insert(size_t __p, const _CharT* __c_string) {
1725 insert(__p, __c_string, _S_char_ptr_len(__c_string));
1728 void insert(size_t __p, _CharT __c) {
1729 insert(__p, &__c, 1);
// No-argument variant inserts one default-constructed character.
1732 void insert(size_t __p) {
1733 _CharT __c = _CharT();
1734 insert(__p, &__c, 1);
1737 void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1738 _Self __r(__i, __j);
1742 void insert(size_t __p, const const_iterator& __i,
1743 const const_iterator& __j) {
1744 _Self __r(__i, __j);
1748 void insert(size_t __p, const iterator& __i,
1749 const iterator& __j) {
1750 _Self __r(__i, __j);
1754 // (position, length) versions of replace operations:
// Replace [__p, __p + __n) with the given replacement; all variants
// funnel into the static replace() splice above.
1755 void replace(size_t __p, size_t __n, const _Self& __r) {
1756 if (__p > size()) _M_throw_out_of_range();
1757 _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
1760 void replace(size_t __p, size_t __n,
1761 const _CharT* __i, size_t __i_len) {
1762 _Self __r(__i, __i_len);
1763 replace(__p, __n, __r);
1766 void replace(size_t __p, size_t __n, _CharT __c) {
1768 replace(__p, __n, __r);
1771 void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1772 _Self __r(__c_string);
1773 replace(__p, __n, __r);
1776 void replace(size_t __p, size_t __n,
1777 const _CharT* __i, const _CharT* __j) {
1778 _Self __r(__i, __j);
1779 replace(__p, __n, __r);
1782 void replace(size_t __p, size_t __n,
1783 const const_iterator& __i, const const_iterator& __j) {
1784 _Self __r(__i, __j);
1785 replace(__p, __n, __r);
1788 void replace(size_t __p, size_t __n,
1789 const iterator& __i, const iterator& __j) {
1790 _Self __r(__i, __j);
1791 replace(__p, __n, __r);
1794 // Single character variants:
// Replacing one character goes through the mutable-iterator proxy.
1795 void replace(size_t __p, _CharT __c) {
1796 if (__p > size()) _M_throw_out_of_range();
1797 iterator __i(this, __p);
1801 void replace(size_t __p, const _Self& __r) {
1802 replace(__p, 1, __r);
1805 void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1806 replace(__p, 1, __i, __i_len);
1809 void replace(size_t __p, const _CharT* __c_string) {
1810 replace(__p, 1, __c_string);
1813 void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1814 replace(__p, 1, __i, __j);
1817 void replace(size_t __p, const const_iterator& __i,
1818 const const_iterator& __j) {
1819 replace(__p, 1, __i, __j);
1822 void replace(size_t __p, const iterator& __i,
1823 const iterator& __j) {
1824 replace(__p, 1, __i, __j);
1834 void erase(size_t __p) {
1835 erase(__p, __p + 1);
// ---- Iterator-based insert/replace/erase wrappers and substr.  Each
// wrapper converts its iterator arguments to positions (index()) and
// delegates to the (position [, length]) members above.
1838 // Insert, iterator variants.
1839 iterator insert(const iterator& __p, const _Self& __r)
1840 { insert(__p.index(), __r); return __p; }
1841 iterator insert(const iterator& __p, size_t __n, _CharT __c)
1842 { insert(__p.index(), __n, __c); return __p; }
1843 iterator insert(const iterator& __p, _CharT __c)
1844 { insert(__p.index(), __c); return __p; }
1845 iterator insert(const iterator& __p )
1846 { insert(__p.index()); return __p; }
1847 iterator insert(const iterator& __p, const _CharT* c_string)
1848 { insert(__p.index(), c_string); return __p; }
1849 iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1850 { insert(__p.index(), __i, __n); return __p; }
1851 iterator insert(const iterator& __p, const _CharT* __i,
1853 { insert(__p.index(), __i, __j); return __p; }
1854 iterator insert(const iterator& __p,
1855 const const_iterator& __i, const const_iterator& __j)
1856 { insert(__p.index(), __i, __j); return __p; }
1857 iterator insert(const iterator& __p,
1858 const iterator& __i, const iterator& __j)
1859 { insert(__p.index(), __i, __j); return __p; }
1861 // Replace, range variants.
// [__p, __q) is the range being replaced; its length is the index delta.
1862 void replace(const iterator& __p, const iterator& __q,
1864 { replace(__p.index(), __q.index() - __p.index(), __r); }
1865 void replace(const iterator& __p, const iterator& __q, _CharT __c)
1866 { replace(__p.index(), __q.index() - __p.index(), __c); }
1867 void replace(const iterator& __p, const iterator& __q,
1868 const _CharT* __c_string)
1869 { replace(__p.index(), __q.index() - __p.index(), __c_string); }
1870 void replace(const iterator& __p, const iterator& __q,
1871 const _CharT* __i, size_t __n)
1872 { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
1873 void replace(const iterator& __p, const iterator& __q,
1874 const _CharT* __i, const _CharT* __j)
1875 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1876 void replace(const iterator& __p, const iterator& __q,
1877 const const_iterator& __i, const const_iterator& __j)
1878 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1879 void replace(const iterator& __p, const iterator& __q,
1880 const iterator& __i, const iterator& __j)
1881 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1883 // Replace, iterator variants.
// Single-position forms: replace one character at __p.
1884 void replace(const iterator& __p, const _Self& __r)
1885 { replace(__p.index(), __r); }
1886 void replace(const iterator& __p, _CharT __c)
1887 { replace(__p.index(), __c); }
1888 void replace(const iterator& __p, const _CharT* __c_string)
1889 { replace(__p.index(), __c_string); }
1890 void replace(const iterator& __p, const _CharT* __i, size_t __n)
1891 { replace(__p.index(), __i, __n); }
1892 void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
1893 { replace(__p.index(), __i, __j); }
1894 void replace(const iterator& __p, const_iterator __i,
1896 { replace(__p.index(), __i, __j); }
1897 void replace(const iterator& __p, iterator __i, iterator __j)
1898 { replace(__p.index(), __i, __j); }
1900 // Iterator and range variants of erase
// Both return a fresh iterator at the erase position (the argument
// iterator is invalidated by the tree rewrite).
1901 iterator erase(const iterator& __p, const iterator& __q) {
1902 size_t __p_index = __p.index();
1903 erase(__p_index, __q.index() - __p_index);
1904 return iterator(this, __p_index);
1906 iterator erase(const iterator& __p) {
1907 size_t __p_index = __p.index();
1908 erase(__p_index, 1);
1909 return iterator(this, __p_index);
// substr shares the tree via a substring node; O(1) character copies.
1912 _Self substr(size_t __start, size_t __len = 1) const {
1913 if (__start > size()) _M_throw_out_of_range();
1914 return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
1917 _Self substr(iterator __start, iterator __end) const {
1918 return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1921 _Self substr(iterator __start) const {
1922 size_t __pos = __start.index();
1923 return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1926 _Self substr(const_iterator __start, const_iterator __end) const {
1927 // This might eventually take advantage of the cache in the
1929 return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1932 rope<_CharT,_Alloc> substr(const_iterator __start) {
1933 size_t __pos = __start.index();
1934 return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
// ---- find (linear search via std::search over const iterators), the
// mutable iterator accessors, no-op capacity stubs, and legacy aliases.
1937 #include <stl/_string_npos.h>
// Search for rope __s starting at __pos; returns the match index, or npos
// (unless _STLP_OLD_ROPE_SEMANTICS, which returned size() instead).
1939 size_type find(const _Self& __s, size_type __pos = 0) const {
1940 if (__pos >= size())
1941 # ifndef _STLP_OLD_ROPE_SEMANTICS
1947 size_type __result_pos;
1948 const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
1949 __result_pos = __result.index();
1950 # ifndef _STLP_OLD_ROPE_SEMANTICS
1951 if (__result_pos == size()) __result_pos = npos;
1953 return __result_pos;
1955 size_type find(_CharT __c, size_type __pos = 0) const;
1956 size_type find(const _CharT* __s, size_type __pos = 0) const {
1957 size_type __result_pos;
1958 const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
1959 __s, __s + _S_char_ptr_len(__s));
1960 __result_pos = __result.index();
1961 # ifndef _STLP_OLD_ROPE_SEMANTICS
1962 if (__result_pos == size()) __result_pos = npos;
1964 return __result_pos;
// Mutable iterators are a distinct family: dereferencing one yields a
// write-capable proxy, so they are deliberately not called begin()/end()
// on const-correct builds (see the begin/end overloads below).
1967 iterator mutable_begin() {
1968 return(iterator(this, 0));
1971 iterator mutable_end() {
1972 return(iterator(this, size()));
1975 reverse_iterator mutable_rbegin() {
1976 return reverse_iterator(mutable_end());
1979 reverse_iterator mutable_rend() {
1980 return reverse_iterator(mutable_begin());
1983 reference mutable_reference_at(size_type __pos) {
1984 return reference(this, __pos);
1988 reference operator[] (size_type __pos) {
1989 return reference(this, __pos);
1992 reference at(size_type __pos) {
1993 if (__pos >= size()) _M_throw_out_of_range();
1994 return (*this)[__pos];
// Capacity operations are meaningless for a tree representation: no-ops.
1997 void resize(size_type, _CharT) {}
1998 void resize(size_type) {}
1999 void reserve(size_type = 0) {}
2000 size_type capacity() const {
2004 // Stuff below this line is dangerous because it's error prone.
2005 // I would really like to get rid of it.
2006 // copy function with funny arg ordering.
2007 size_type copy(_CharT* __buffer, size_type __n,
2008 size_type __pos = 0) const {
2009 return copy(__pos, __n, __buffer);
// Depending on the build, begin()/end() either expose mutable iterators
// or (const-correct configuration) alias the const variants.
2012 iterator end() { return mutable_end(); }
2014 iterator begin() { return mutable_begin(); }
2016 reverse_iterator rend() { return mutable_rend(); }
2018 reverse_iterator rbegin() { return mutable_rbegin(); }
2022 const_iterator end() { return const_end(); }
2024 const_iterator begin() { return const_begin(); }
2026 const_reverse_iterator rend() { return const_rend(); }
2028 const_reverse_iterator rbegin() { return const_rbegin(); }
// Out-of-class definition of rope::npos for compilers that cannot
// initialize the static const in-class (guarded gcc 2.96 workaround).
2033 #if !defined (_STLP_STATIC_CONST_INIT_BUG)
2034 # if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
2035 template <class _CharT, class _Alloc>
2036 const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
// const_iterator subscript: fetch the character __n past the current
// position straight from the tree (returns by value, read-only).
2040 template <class _CharT, class _Alloc>
2042 _Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
2043 { return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
// Equality requires both the same position AND the same underlying tree.
2045 template <class _CharT, class _Alloc>
2046 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2047 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2048 return (__x._M_current_pos == __y._M_current_pos &&
2049 __x._M_root == __y._M_root);
2052 template <class _CharT, class _Alloc>
2053 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2054 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2055 { return (__x._M_current_pos < __y._M_current_pos); }
2057 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
2059 template <class _CharT, class _Alloc>
2060 inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2061 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2062 { return !(__x == __y); }
2064 template <class _CharT, class _Alloc>
2065 inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2066 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2067 { return __y < __x; }
2069 template <class _CharT, class _Alloc>
2070 inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2071 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2072 { return !(__y < __x); }
2074 template <class _CharT, class _Alloc>
2075 inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2076 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2077 { return !(__x < __y); }
2079 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2081 template <class _CharT, class _Alloc>
2082 inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2083 const _Rope_const_iterator<_CharT,_Alloc>& __y)
2084 { return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
2086 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
2087 template <class _CharT, class _Alloc>
2088 inline _Rope_const_iterator<_CharT,_Alloc>
2089 operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
2090 { return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
2093 template <class _CharT, class _Alloc>
2094 inline _Rope_const_iterator<_CharT,_Alloc>
2095 operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
2096 { return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
2098 template <class _CharT, class _Alloc>
2099 inline _Rope_const_iterator<_CharT,_Alloc>
2100 operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
2101 { return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
2103 template <class _CharT, class _Alloc>
2104 inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2105 const _Rope_iterator<_CharT,_Alloc>& __y) {
2106 return (__x._M_current_pos == __y._M_current_pos &&
2107 __x._M_root_rope == __y._M_root_rope);
2110 template <class _CharT, class _Alloc>
2111 inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2112 const _Rope_iterator<_CharT,_Alloc>& __y)
2113 { return (__x._M_current_pos < __y._M_current_pos); }
2115 #if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
2116 template <class _CharT, class _Alloc>
2117 inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
2118 const _Rope_iterator<_CharT,_Alloc>& __y)
2119 { return !(__x == __y); }
2121 template <class _CharT, class _Alloc>
2122 inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
2123 const _Rope_iterator<_CharT,_Alloc>& __y)
2124 { return __y < __x; }
2126 template <class _CharT, class _Alloc>
2127 inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
2128 const _Rope_iterator<_CharT,_Alloc>& __y)
2129 { return !(__y < __x); }
2131 template <class _CharT, class _Alloc>
2132 inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
2133 const _Rope_iterator<_CharT,_Alloc>& __y)
2134 { return !(__x < __y); }
2135 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2137 template <class _CharT, class _Alloc>
2138 inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2139 const _Rope_iterator<_CharT,_Alloc>& __y)
2140 { return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
2142 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
2143 template <class _CharT, class _Alloc>
2144 inline _Rope_iterator<_CharT,_Alloc>
2145 operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2147 return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
2151 template <class _CharT, class _Alloc>
2152 inline _Rope_iterator<_CharT,_Alloc>
2153 operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2155 return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
2158 template <class _CharT, class _Alloc>
2159 inline _Rope_iterator<_CharT,_Alloc>
2160 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2161 return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
2164 template <class _CharT, class _Alloc>
2165 inline rope<_CharT,_Alloc>
2166 operator+ (const rope<_CharT,_Alloc>& __left,
2167 const rope<_CharT,_Alloc>& __right) {
2168 _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
2169 return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
2170 // Inlining this should make it possible to keep __left and __right in registers.
2173 template <class _CharT, class _Alloc>
2174 inline rope<_CharT,_Alloc>&
2175 operator+= (rope<_CharT,_Alloc>& __left,
2176 const rope<_CharT,_Alloc>& __right) {
2177 __left.append(__right);
2181 template <class _CharT, class _Alloc>
2182 inline rope<_CharT,_Alloc>
2183 operator+ (const rope<_CharT,_Alloc>& __left,
2184 const _CharT* __right) {
2185 size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2186 return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
2189 template <class _CharT, class _Alloc>
2190 inline rope<_CharT,_Alloc>&
2191 operator+= (rope<_CharT,_Alloc>& __left,
2192 const _CharT* __right) {
2193 __left.append(__right);
2197 template <class _CharT, class _Alloc>
2198 inline rope<_CharT,_Alloc>
2199 operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
2200 return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
2203 template <class _CharT, class _Alloc>
2204 inline rope<_CharT,_Alloc>&
2205 operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
2206 __left.append(__right);
2210 template <class _CharT, class _Alloc>
2212 operator< (const rope<_CharT,_Alloc>& __left,
2213 const rope<_CharT,_Alloc>& __right) {
2214 return __left.compare(__right) < 0;
2217 template <class _CharT, class _Alloc>
2219 operator== (const rope<_CharT,_Alloc>& __left,
2220 const rope<_CharT,_Alloc>& __right) {
2221 return __left.compare(__right) == 0;
2224 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
2226 template <class _CharT, class _Alloc>
2228 operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2229 return !(__x == __y);
2232 template <class _CharT, class _Alloc>
2234 operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2238 template <class _CharT, class _Alloc>
2240 operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2241 return !(__y < __x);
2244 template <class _CharT, class _Alloc>
2246 operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2247 return !(__x < __y);
2250 template <class _CharT, class _Alloc>
2251 inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2252 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2253 return !(__x == __y);
2256 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2258 template <class _CharT, class _Alloc>
2259 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2260 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2261 return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
2264 #if !defined (_STLP_USE_NO_IOSTREAMS)
// Stream inserter for ropes; the definition lives in <stl/_rope.c>.
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
// Convenience typedefs for the common character types (the wide
// variant only when wchar_t support is configured in).
typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
2275 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2276 { return __c.mutable_reference_at(__i); }
2278 #if defined (_STLP_HAS_WCHAR_T)
2279 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2280 { return __c.mutable_reference_at(__i); }
2283 #if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
2284 template <class _CharT, class _Alloc>
2285 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
2289 inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
2290 # ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
2291 inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
2294 #endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
2297 // Hash functions should probably be revisited later:
2298 _STLP_TEMPLATE_NULL struct hash<crope> {
2299 size_t operator()(const crope& __str) const {
2300 size_t _p_size = __str.size();
2302 if (0 == _p_size) return 0;
2303 return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
2307 #if defined (_STLP_HAS_WCHAR_T) // dwa 8/21/97
2308 _STLP_TEMPLATE_NULL struct hash<wrope> {
2309 size_t operator()(const wrope& __str) const {
2310 size_t _p_size = __str.size();
2312 if (0 == _p_size) return 0;
2313 return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
2318 #if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
2319 // I couldn't get this to work with VC++
2320 template<class _CharT,class _Alloc>
2321 # if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
// Rope-aware rotate over mutable iterators; defined in <stl/_rope.c>.
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);
2328 inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
2329 _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
2330 _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
2331 { _Rope_rotate(__first, __middle, __last); }
2334 template <class _CharT, class _Alloc>
2335 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
2336 if (_M_current_valid) {
2339 return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
2343 #if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
2344 template <class _CharT, class _Alloc>
2345 struct __move_traits<rope<_CharT, _Alloc> > {
2346 typedef __stlp_movable implemented;
2347 //Completness depends on the allocator:
2348 typedef typename __move_traits<_Alloc>::complete complete;
2354 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
2355 # include <stl/_rope.c>
2358 #endif /* _STLP_INTERNAL_ROPE_H */