2 * Portions Copyright (c) 2008 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.
4 * Copyright (c) 1996,1997
5 * Silicon Graphics Computer Systems, Inc.
8 * Moscow Center for SPARC Technology
13 * This material is provided "as is", with absolutely no warranty expressed
14 * or implied. Any use is at your own risk.
16 * Permission to use or copy this software for any purpose is hereby granted
17 * without fee, provided the above notices are retained on all copies.
18 * Permission to modify the code and to distribute modified code is granted,
19 * provided the above notices are retained, and a notice that the code was
20 * modified is included with the above copyright notice.
24 /* NOTE: This is an internal header file, included by other STL headers.
25 * You should not attempt to use it directly.
28 // rope<_CharT,_Alloc> is a sequence of _CharT.
29 // Ropes appear to be mutable, but update operations
30 // really copy enough of the data structure to leave the original
31 // valid. Thus ropes can be logically copied by just copying a pointer value.
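//
// Illustrative sketch (not part of this header): since an update copies only
// what it needs, "copying" a rope is essentially a pointer copy plus a
// reference-count increment, so code such as
//
//   rope<char> __a("some very long text");
//   rope<char> __b = __a;   // shares __a's tree
//   __b.push_back('!');     // __a is left unchanged
//
// stays cheap even for very large strings.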
34 #ifndef _STLP_INTERNAL_ROPE_H
35 #define _STLP_INTERNAL_ROPE_H
37 #ifndef _STLP_INTERNAL_ALGOBASE_H
38 # include <stl/_algobase.h>
45 #ifndef _STLP_INTERNAL_ALLOC_H
46 # include <stl/_alloc.h>
49 #ifndef _STLP_INTERNAL_ITERATOR_H
50 # include <stl/_iterator.h>
53 #ifndef _STLP_INTERNAL_ALGO_H
54 # include <stl/_algo.h>
57 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
58 # include <stl/_function_base.h>
61 #ifndef _STLP_INTERNAL_NUMERIC_H
62 # include <stl/_numeric.h>
65 #ifndef _STLP_INTERNAL_HASH_FUN_H
66 # include <stl/_hash_fun.h>
69 #ifndef _STLP_CHAR_TRAITS_H
70 # include <stl/char_traits.h>
73 #ifndef _STLP_INTERNAL_THREADS_H
74 # include <stl/_threads.h>
77 #ifdef _STLP_SGI_THREADS
81 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
82 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
83 #elif defined(__MRC__)||defined(__SC__)
84 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
86 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
91 // First a lot of forward declarations. The standard seems to require
92 // much stricter "declaration before use" than many of the implementations actually enforce.
94 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
95 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
96 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
97 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
98 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
99 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
100 template<class _CharT, class _Alloc> class _Rope_iterator;
101 template<class _CharT, class _Alloc> class _Rope_const_iterator;
102 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
103 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
105 _STLP_MOVE_TO_PRIV_NAMESPACE
107 // Some helpers, so we can use the power algorithm on ropes.
108 // See below for why this isn't local to the implementation.
110 // This uses a nonstandard refcount convention.
111 // The result has refcount 0.
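// For illustration: the repeated-character constructor rope(size_t, _CharT)
// further down in this header uses these helpers roughly as
//   __power(__base_rope, __exponent, _Rope_Concat_fn<_CharT,_Alloc>())
// i.e. it builds the repetition with O(log __exponent) applications of the
// concatenation functor to an already constructed base chunk.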
112 template<class _CharT, class _Alloc>
113 struct _Rope_Concat_fn
114 : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
115 rope<_CharT,_Alloc> > {
116 rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
117 const rope<_CharT,_Alloc>& __y) {
122 template <class _CharT, class _Alloc>
125 __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
126 { return rope<_CharT,_Alloc>(); }
128 _STLP_MOVE_TO_STD_NAMESPACE
131 template <class _CharT>
132 inline void _S_construct_null_aux(_CharT *__p, const __true_type&) { *__p = 0; }
135 template <class _CharT>
136 inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
137 { _STLP_STD::_Construct(__p); }
139 template <class _CharT>
140 inline void _S_construct_null(_CharT *__p) {
141 typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
142 _S_construct_null_aux(__p, _Char_Is_Integral());
145 // char_producers are logically functions that generate a section of
146 // a string. These can be converted to ropes. The resulting rope
147 // invokes the char_producer on demand. This allows, for example,
148 // files to be viewed as ropes without reading the entire file.
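// A minimal producer, shown here only as an illustration (it is not part of
// this header): it serves characters out of a caller-owned array, and a rope
// built from it (via the rope(char_producer<_CharT>*, ...) constructor below)
// touches that array only when characters are actually requested.
//
//   class array_producer : public char_producer<char> {
//     const char* _M_base;
//   public:
//     array_producer(const char* __base) : _M_base(__base) {}
//     virtual void operator()(size_t __start_pos, size_t __len, char* __buffer) {
//       for (size_t __i = 0; __i != __len; ++__i)
//         __buffer[__i] = _M_base[__start_pos + __i];
//     }
//   };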
149 template <class _CharT>
150 class char_producer {
152 virtual ~char_producer() {}
153 virtual void operator()(size_t __start_pos, size_t __len,
154 _CharT* __buffer) = 0;
155 // Buffer should really be an arbitrary output iterator.
156 // That way we could flatten directly into an ostream, etc.
157 // This is thoroughly impossible, since iterator types don't
158 // have runtime descriptions.
163 // Sequence must provide an append operation that appends an
164 // array to the sequence. Sequence buffers are useful only if
165 // appending an entire array is cheaper than appending element by element.
166 // This is true for many string representations.
167 // This should perhaps inherit from ostream<sequence::value_type>
168 // and be implemented correspondingly, so that they can be used
169 // for formatted output. For the sake of portability, we don't do this yet.
171 // For now, sequence buffers behave as output iterators. But they also
172 // behave a little like basic_ostringstream<sequence::value_type> and a
173 // little like containers.
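// Rough usage sketch (illustrative only): buffer character-at-a-time output so
// that the underlying sequence sees one bulk append per buffer-full (100
// characters by default) rather than one call per character.
//
//   string __s;
//   sequence_buffer<string> __buf(__s);
//   for (char __c = 'a'; __c <= 'z'; ++__c)
//     __buf.push_back(__c);
//   __buf.flush();   // also done by the destructor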
175 template<class _Sequence
176 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
177 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
178 , size_t _Buf_sz = 100
179 # if defined(__sgi) && !defined(__GNUC__)
180 # define __TYPEDEF_WORKAROUND
181 ,class _V = typename _Sequence::value_type
183 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
185 // The 3rd parameter works around a common compiler bug.
186 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
188 # ifndef __TYPEDEF_WORKAROUND
189 typedef typename _Sequence::value_type value_type;
190 typedef sequence_buffer<_Sequence
191 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
192 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
195 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
197 enum { _Buf_sz = 100};
198 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
200 # else /* __TYPEDEF_WORKAROUND */
201 typedef _V value_type;
202 typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
203 # endif /* __TYPEDEF_WORKAROUND */
205 _Sequence* _M_prefix;
206 value_type _M_buffer[_Buf_sz];
210 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
213 ~sequence_buffer() { flush(); }
214 sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
215 sequence_buffer(const _Self& __x) {
216 _M_prefix = __x._M_prefix;
217 _M_buf_count = __x._M_buf_count;
218 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
220 sequence_buffer(_Self& __x) {
222 _M_prefix = __x._M_prefix;
225 sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
226 _Self& operator= (_Self& __x) {
228 _M_prefix = __x._M_prefix;
232 _Self& operator= (const _Self& __x) {
233 _M_prefix = __x._M_prefix;
234 _M_buf_count = __x._M_buf_count;
235 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
238 void push_back(value_type __x) {
239 if (_M_buf_count < _Buf_sz) {
240 _M_buffer[_M_buf_count] = __x;
248 void append(const value_type *__s, size_t __len) {
249 if (__len + _M_buf_count <= _Buf_sz) {
250 size_t __i = _M_buf_count; size_t __j = 0;
252 for (; __j < __len; __i++, __j++) {
253 _M_buffer[__i] = __s[__j];
255 _M_buf_count += __len;
256 } else if (0 == _M_buf_count) {
257 _M_prefix->append(__s, __s + __len);
263 _Self& write(const value_type *__s, size_t __len) {
267 _Self& put(value_type __x) {
271 _Self& operator=(const value_type& __rhs) {
275 _Self& operator*() { return *this; }
276 _Self& operator++() { return *this; }
277 _Self& operator++(int) { return *this; }
280 // The following should be treated as private, at least for now.
281 template<class _CharT>
282 class _Rope_char_consumer {
283 #if !defined (_STLP_MEMBER_TEMPLATES)
285 //Without member templates we have to use run-time parameterization.
286 // The symmetry with char_producer is accidental and temporary.
287 virtual ~_Rope_char_consumer() {}
288 virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
293 // What follows should really be local to rope. Unfortunately,
294 // that doesn't work, since it makes it impossible to define generic
295 // equality on rope iterators. According to the draft standard, the
296 // template parameters for such an equality operator cannot be inferred
297 // from the occurrence of a member class as a parameter.
298 // (SGI compilers in fact allow this, but the result wouldn't be portable.)
300 // Similarly, some of the static member functions are member functions
301 // only to avoid polluting the global namespace, and to circumvent
302 // restrictions on type inference for template functions.
306 // The internal data structure for representing a rope. This is
307 // private to the implementation. A rope is really just a pointer to one of these nodes.
310 // A few basic functions for manipulating this data structure
311 // are members of _RopeRep. Most of the more complex algorithms
312 // are implemented as rope members.
314 // Some of the static member functions of _RopeRep have identically
315 // named functions in rope that simply invoke the _RopeRep versions.
318 template<class _CharT, class _Alloc>
320 : public _Refcount_Base
322 typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
327 // "__ROPE_DEPTH_SIZE" is set to one more then the "__ROPE_MAX_DEPTH".
328 // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
329 // but this addition causes the sunpro compiler to complain about
330 // multiple declarations during the initialization of "_S_min_len".
331 // Changed to be a fixed value and the sunpro compiler appears to be satisfied.
334 # define __ROPE_MAX_DEPTH 45
335 # define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
336 enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
337 enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
338 // Apparently needed by VC++
339 // The data fields of leaves are allocated with some
340 // extra space, to accommodate future growth and, for basic
341 // character types, to hold a trailing eos character.
342 enum { _S_alloc_granularity = 8 };
345 bool _M_is_balanced:8;
347 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
348 typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
350 allocator_type get_allocator() const { return allocator_type(_M_size); }
352 unsigned char _M_depth;
353 _CharT* _STLP_VOLATILE _M_c_string;
354 _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
356 # ifdef _STLP_NO_ARROW_OPERATOR
357 _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
360 /* Flattened version of string, if needed. */
362 /* If it's not 0, then the memory is owned by this node. */
364 /* In the case of a leaf, this may point to */
365 /* the same memory as the data field. */
366 _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
367 allocator_type __a) :
369 _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
372 typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
373 # ifdef _STLP_HAS_WCHAR_T
374 typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
376 typedef __false_type _IsWCharT;
379 typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;
382 /* Please explain why this code is necessary if you uncomment it.
383 * The problem with it is that the rope implementation expects _S_rounded_up_size(n)
384 * to return a size > n in order to store the terminating null character. When the
385 * instantiation type is not char or wchar_t this is not guaranteed, resulting in a memory overrun.
388 static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
389 // Allow slop for in-place expansion.
390 return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
393 static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
394 // Allow slop for in-place expansion.
395 return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
398 // fbp : moved from RopeLeaf
399 static size_t _S_rounded_up_size(size_t __n)
400 //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
401 { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
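// For example, with _S_alloc_granularity == 8 this maps 1..7 -> 8, 8..15 -> 16,
// 16..23 -> 24, and so on, so at least one slot beyond __n is always available
// for the terminating eos character; the commented-out _aux dispatch above
// would reserve that extra slot only for the basic character types.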
403 static void _S_free_string( _CharT* __s, size_t __len,
404 allocator_type __a) {
405 _STLP_STD::_Destroy_Range(__s, __s + __len);
406 // This has to be a static member, so this gets a bit messy
407 # ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
408 __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
410 __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
414 // Deallocate data section of a leaf.
415 // This shouldn't be a member function.
416 // But it's hard to do anything else at the
417 // moment, because it's templatized w.r.t. the allocator.
419 // Does nothing if __GC is defined.
420 void _M_free_c_string();
422 // Deallocate t. Assumes t is not 0.
423 void _M_unref_nonnil() {
424 if (_M_decr() == 0) _M_free_tree();
426 void _M_ref_nonnil() {
429 static void _S_unref(_Self* __t) {
431 __t->_M_unref_nonnil();
434 static void _S_ref(_Self* __t) {
435 if (0 != __t) __t->_M_incr();
437 //static void _S_free_if_unref(_Self* __t) {
438 // if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
442 template<class _CharT, class _Alloc>
443 struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
445 _CharT* _M_data; /* Not necessarily 0 terminated. */
446 /* The allocated size is */
447 /* _S_rounded_up_size(size), except */
448 /* in the GC case, in which it */
449 /* doesn't matter. */
451 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
452 typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
453 void _M_init(__true_type const& /*_IsBasicCharType*/) {
454 this->_M_c_string = _M_data;
456 void _M_init(__false_type const& /*_IsBasicCharType*/) {}
459 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
460 typedef typename _RopeRep::allocator_type allocator_type;
462 _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
463 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
465 _STLP_ASSERT(_p_size > 0)
466 _M_init(_IsBasicCharType());
469 # ifdef _STLP_NO_ARROW_OPERATOR
471 _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
474 // The constructor assumes that d has been allocated with
475 // the proper allocator and the properly padded size.
476 // In contrast, the destructor deallocates the data:
478 if (_M_data != this->_M_c_string) {
479 this->_M_free_c_string();
481 _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
485 template<class _CharT, class _Alloc>
486 struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
488 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
493 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
494 typedef typename _RopeRep::allocator_type allocator_type;
495 _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
496 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
497 (max)(__l->_M_depth, __r->_M_depth) + 1, false,
498 __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
500 # ifdef _STLP_NO_ARROW_OPERATOR
501 _Rope_RopeConcatenation() {}
502 _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
505 ~_Rope_RopeConcatenation() {
506 this->_M_free_c_string();
507 _M_left->_M_unref_nonnil();
508 _M_right->_M_unref_nonnil();
512 template <class _CharT, class _Alloc>
513 struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
515 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
517 char_producer<_CharT>* _M_fn;
519 * Char_producer is owned by the
520 * rope and should be explicitly
521 * deleted when the rope becomes inaccessible.
524 bool _M_delete_when_done;
525 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
526 typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
527 # ifdef _STLP_NO_ARROW_OPERATOR
528 _Rope_RopeFunction() {}
529 _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
532 _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
533 bool __d, allocator_type __a)
534 : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
535 , _M_delete_when_done(__d)
536 { _STLP_ASSERT(_p_size > 0) }
538 ~_Rope_RopeFunction() {
539 this->_M_free_c_string();
540 if (_M_delete_when_done) {
547 * Substring results are usually represented using just
548 * concatenation nodes. But in the case of very long flat ropes
549 * or ropes with a functional representation that isn't practical.
550 * In that case, we represent the result as a special case of
551 * RopeFunction, whose char_producer points back to the rope itself.
552 * In all cases except repeated substring operations and
553 * deallocation, we treat the result as a RopeFunction.
555 template<class _CharT, class _Alloc>
556 struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
558 // XXX this whole class should be rewritten.
559 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
560 _RopeRep *_M_base; // not 0
562 /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
564 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
565 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
566 switch (_M_base->_M_tag) {
567 case _RopeRep::_S_function:
568 case _RopeRep::_S_substringfn:
570 char_producer<_CharT>* __fn =
571 __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
572 _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
573 _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
574 (*__fn)(__start_pos + _M_start, __req_len, __buffer);
577 case _RopeRep::_S_leaf:
580 __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
581 _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
590 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
591 typedef typename _RopeRep::allocator_type allocator_type;
593 _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
594 : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
595 _M_base(__b), _M_start(__s) {
596 _STLP_ASSERT(__l > 0)
597 _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
598 _M_base->_M_ref_nonnil();
599 this->_M_tag = _RopeRep::_S_substringfn;
601 virtual ~_Rope_RopeSubstring()
602 { _M_base->_M_unref_nonnil(); }
606 * Self-destructing pointers to Rope_rep.
607 * These are not conventional smart pointers. Their
608 * only purpose in life is to ensure that unref is called
609 * on the pointer either at normal exit or if an exception
610 * is raised. It is the caller's responsibility to
611 * adjust reference counts when these pointers are initialized
612 * or assigned to. (This convention significantly reduces
613 * the number of potentially expensive reference count updates.)
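 *
 * Typical use, as a sketch: bind an already-counted tree to a self-destruct
 * pointer so that it is released even if a later step throws, e.g.
 *
 *   _Rope_self_destruct_ptr<_CharT,_Alloc> __left(_S_substring(__old, 0, __p));
 *   // ... further work that may throw; __left unrefs its tree on unwind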
616 template<class _CharT, class _Alloc>
617 struct _Rope_self_destruct_ptr {
618 _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
619 ~_Rope_self_destruct_ptr()
620 { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
621 # ifdef _STLP_USE_EXCEPTIONS
622 _Rope_self_destruct_ptr() : _M_ptr(0) {}
624 _Rope_self_destruct_ptr() {}
626 _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
627 _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
628 _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
629 operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
630 _Rope_self_destruct_ptr<_CharT, _Alloc>&
631 operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
632 { _M_ptr = __x; return *this; }
636 * Dereferencing a nonconst iterator has to return something
637 * that behaves almost like a reference. It's not possible to
638 * return an actual reference since assignment requires extra
639 * work. And we would get into the same problems as with the
640 * CD2 version of basic_string.
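 *
 * Concretely (sketch): dereferencing a non-const rope iterator yields one of
 * these proxies instead of a _CharT&, so, given a non-const iterator __it
 * into a rope<char>,
 *
 *   *__it = 'x';        // goes through _Rope_char_ref_proxy::operator=(_CharT)
 *   char __c = *__it;   // goes through operator _CharT()
 *
 * which lets the assignment path do the extra update work.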
642 template<class _CharT, class _Alloc>
643 class _Rope_char_ref_proxy {
644 typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
645 friend class rope<_CharT,_Alloc>;
646 friend class _Rope_iterator<_CharT,_Alloc>;
647 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
648 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
649 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
650 typedef rope<_CharT,_Alloc> _My_rope;
653 bool _M_current_valid;
654 _My_rope* _M_root; // The whole rope.
656 _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
657 _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
658 _Rope_char_ref_proxy(const _Self& __x) :
659 _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
660 // Don't preserve cache if the reference can outlive the
661 // expression. We claim that's not possible without calling
662 // a copy constructor or generating a reference to a proxy
663 // reference. We declare the latter to have undefined semantics.
664 _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
665 : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
666 inline operator _CharT () const;
667 _Self& operator= (_CharT __c);
668 _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
669 _Self& operator= (const _Self& __c) {
670 return operator=((_CharT)__c);
674 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
675 template<class _CharT, class __Alloc>
676 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
677 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
683 // There is no really acceptable way to handle this. The default
684 // definition of swap doesn't work for proxy references.
685 // It can't really be made to work, even with ugly hacks, since
686 // the only unusual operation it uses is the copy constructor, which
687 // is needed for other purposes. We provide a macro for
688 // full specializations, and instantiate the most common case.
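// (Illustration: given two such proxies __p and __q obtained from non-const
// rope iterators, the specialized swap(__p, __q) exchanges the referenced
// characters by reading both through operator _CharT() and writing both back
// through operator=, something the generic swap, which expects real
// references, cannot do.)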
689 # define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
690 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
691 _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
692 _CharT __tmp = __a; \
697 _ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
699 # ifndef _STLP_NO_WCHAR_T
700 _ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
703 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
705 template<class _CharT, class _Alloc>
706 class _Rope_char_ptr_proxy {
707 // XXX this class should be rewritten.
709 typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
710 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
712 rope<_CharT,_Alloc>* _M_root; // The whole rope.
714 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
715 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
716 _Rope_char_ptr_proxy(const _Self& __x)
717 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
718 _Rope_char_ptr_proxy() {}
719 _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
720 _STLP_ASSERT(0 == __x)
722 _Self& operator= (const _Self& __x) {
724 _M_root = __x._M_root;
728 _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
729 return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
736 * Unlike in the C version, we cache only part of the stack
737 * for rope iterators, since they must be efficiently copyable.
738 * When we run out of cache, we have to reconstruct the iterator value.
740 * Pointers from iterators are not included in reference counts.
741 * Iterators are assumed to be thread private. Ropes can be shared.
744 template<class _CharT, class _Alloc>
745 class _Rope_iterator_base
746 /* : public random_access_iterator<_CharT, ptrdiff_t> */
748 friend class rope<_CharT,_Alloc>;
749 typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
750 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
752 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
754 enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
755 enum { _S_iterator_buf_len = 15 };
756 size_t _M_current_pos;
759 // Starting position for current leaf
761 // Buffer possibly containing current char.
762 _CharT* _M_buf_start;
763 // Pointer to current char in buffer, != 0 ==> buffer valid.
765 // One past the last valid char in buffer.
768 // What follows is the path cache. We go out of our
769 // way to make this compact.
770 // Path_end contains the bottom section of the path from
771 // the root to the current leaf.
773 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
774 _RopeRep const*_M_data[4];
776 _RopeRep const*_M_data[_S_path_cache_len];
779 // Last valid position in path_end;
780 // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
781 // point to concatenation nodes.
783 // (_M_path_directions >> __i) & 1 is 1
784 // if we got from _M_path_end[leaf_index - __i - 1]
785 // to _M_path_end[leaf_index - __i] by going to the
786 // right. Assumes path_cache_len <= 9.
787 unsigned char _M_path_directions;
788 // Short buffer for surrounding chars.
789 // This is useful primarily for
790 // RopeFunctions. We put the buffer
791 // here to avoid locking in the
792 // multithreaded case.
793 // The cached path is generally assumed to be valid
794 // only if the buffer is valid.
796 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
799 _CharT _M_data[_S_iterator_buf_len];
803 // Set buffer contents given path cache.
804 static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
805 // Set buffer contents and path cache.
806 static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
807 // As above, but assumes path cache is valid for previous posn.
808 static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
809 _Rope_iterator_base() {}
810 _Rope_iterator_base(_RopeRep* __root, size_t __pos)
811 : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
812 void _M_incr(size_t __n);
813 void _M_decr(size_t __n);
815 size_t index() const { return _M_current_pos; }
817 void _M_copy_buf(const _Self& __x) {
818 _M_tmp_buf = __x._M_tmp_buf;
819 if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
820 _M_buf_start = _M_tmp_buf._M_data;
821 _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
822 _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
824 _M_buf_end = __x._M_buf_end;
829 _Rope_iterator_base(const _Self& __x) :
830 _M_current_pos(__x._M_current_pos),
831 _M_root(__x._M_root),
832 _M_leaf_pos( __x._M_leaf_pos ),
833 _M_buf_start(__x._M_buf_start),
834 _M_buf_ptr(__x._M_buf_ptr),
835 _M_path_end(__x._M_path_end),
836 _M_leaf_index(__x._M_leaf_index),
837 _M_path_directions(__x._M_path_directions)
839 if (0 != __x._M_buf_ptr) {
843 _Self& operator = (const _Self& __x)
845 _M_current_pos = __x._M_current_pos;
846 _M_root = __x._M_root;
847 _M_buf_start = __x._M_buf_start;
848 _M_buf_ptr = __x._M_buf_ptr;
849 _M_path_end = __x._M_path_end;
850 _M_leaf_index = __x._M_leaf_index;
851 _M_path_directions = __x._M_path_directions;
852 _M_leaf_pos = __x._M_leaf_pos;
853 if (0 != __x._M_buf_ptr) {
860 template<class _CharT, class _Alloc> class _Rope_iterator;
862 template<class _CharT, class _Alloc>
863 class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
864 friend class rope<_CharT,_Alloc>;
865 typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
866 typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
869 # ifndef _STLP_HAS_NO_NAMESPACES
870 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
871 // The one from the base class may not be directly visible.
873 _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
874 _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
875 // Only nonconst iterators modify root ref count
878 typedef _CharT reference; // Really a value. Returning a reference
879 // would be a mess, since it would have
880 // to be included in refcount.
881 typedef const _CharT* pointer;
882 typedef _CharT value_type;
883 typedef ptrdiff_t difference_type;
884 typedef random_access_iterator_tag iterator_category;
887 _Rope_const_iterator() {}
888 _Rope_const_iterator(const _Self& __x) :
889 _Rope_iterator_base<_CharT,_Alloc>(__x) { }
890 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
891 _Rope_iterator_base<_CharT,_Alloc>(__x) {}
892 _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
893 _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
894 _Self& operator= (const _Self& __x) {
895 _Base::operator=(__x);
898 reference operator*() {
899 if (0 == this->_M_buf_ptr)
900 #if !defined (__DMC__)
901 _Rope_iterator_base<_CharT, _Alloc>::_S_setcache(*this);
903 { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
905 return *(this->_M_buf_ptr);
907 _Self& operator++() {
909 if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
910 this->_M_buf_ptr = __next;
911 ++this->_M_current_pos;
917 _Self& operator+=(ptrdiff_t __n) {
925 _Self& operator--() {
929 _Self& operator-=(ptrdiff_t __n) {
937 _Self operator++(int) {
938 size_t __old_pos = this->_M_current_pos;
940 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
941 // This makes a subsequent dereference expensive.
942 // Perhaps we should instead copy the iterator
943 // if it has a valid cache?
945 _Self operator--(int) {
946 size_t __old_pos = this->_M_current_pos;
948 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
950 inline reference operator[](size_t __n);
953 template<class _CharT, class _Alloc>
954 class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
955 friend class rope<_CharT,_Alloc>;
956 typedef _Rope_iterator<_CharT, _Alloc> _Self;
957 typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
958 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
961 rope<_CharT,_Alloc>* _M_root_rope;
962 // root is treated as a cached version of this,
963 // and is used to detect changes to the underlying rope.
965 // Root is included in the reference count.
966 // This is necessary so that we can detect changes reliably.
967 // Unfortunately, it requires careful bookkeeping for the non-GC case.
969 _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
973 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
974 typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
975 typedef _CharT value_type;
976 typedef ptrdiff_t difference_type;
977 typedef random_access_iterator_tag iterator_category;
979 ~_Rope_iterator() { //*TY 5/6/00 - added dtor to balance reference count
980 _RopeRep::_S_unref(this->_M_root);
983 rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
985 this->_M_root = 0; // Needed for reference counting.
987 _Rope_iterator(const _Self& __x) :
988 _Rope_iterator_base<_CharT,_Alloc>(__x) {
989 _M_root_rope = __x._M_root_rope;
990 _RopeRep::_S_ref(this->_M_root);
992 _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
993 _Self& operator= (const _Self& __x) {
994 _RopeRep* __old = this->_M_root;
995 _RopeRep::_S_ref(__x._M_root);
996 _Base::operator=(__x);
997 _M_root_rope = __x._M_root_rope;
998 _RopeRep::_S_unref(__old);
1001 reference operator*() {
1003 if (0 == this->_M_buf_ptr) {
1004 return reference(_M_root_rope, this->_M_current_pos);
1006 return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
1009 _Self& operator++() {
1013 _Self& operator+=(ptrdiff_t __n) {
1017 this->_M_decr(-__n);
1021 _Self& operator--() {
1025 _Self& operator-=(ptrdiff_t __n) {
1029 this->_M_incr(-__n);
1033 _Self operator++(int) {
1034 size_t __old_pos = this->_M_current_pos;
1036 return _Self(_M_root_rope, __old_pos);
1038 _Self operator--(int) {
1039 size_t __old_pos = this->_M_current_pos;
1041 return _Self(_M_root_rope, __old_pos);
1043 reference operator[](ptrdiff_t __n) {
1044 return reference(_M_root_rope, this->_M_current_pos + __n);
1048 # ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1049 template <class _CharT, class _Alloc>
1050 inline random_access_iterator_tag
1051 iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
1052 template <class _CharT, class _Alloc>
1053 inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1054 template <class _CharT, class _Alloc>
1055 inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1056 template <class _CharT, class _Alloc>
1057 inline random_access_iterator_tag
1058 iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
1059 template <class _CharT, class _Alloc>
1060 inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1061 template <class _CharT, class _Alloc>
1062 inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1063 #endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
1065 template <class _CharT, class _Alloc, class _CharConsumer>
1066 bool _S_apply_to_pieces(_CharConsumer& __c,
1067 _Rope_RopeRep<_CharT, _Alloc> *__r,
1068 size_t __begin, size_t __end);
1069 // begin and end are assumed to be in range.
1071 template <class _CharT, class _Alloc>
1073 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
1074 : public __stlport_class<rope<_CharT, _Alloc> >
1077 typedef rope<_CharT,_Alloc> _Self;
1079 typedef _CharT value_type;
1080 typedef ptrdiff_t difference_type;
1081 typedef size_t size_type;
1082 typedef _CharT const_reference;
1083 typedef const _CharT* const_pointer;
1084 typedef _Rope_iterator<_CharT,_Alloc> iterator;
1085 typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1086 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1087 typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1089 friend class _Rope_iterator<_CharT,_Alloc>;
1090 friend class _Rope_const_iterator<_CharT,_Alloc>;
1091 friend struct _Rope_RopeRep<_CharT,_Alloc>;
1092 friend class _Rope_iterator_base<_CharT,_Alloc>;
1093 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1094 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1095 friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1097 _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1100 typedef _CharT* _Cstrptr;
1102 static _CharT _S_empty_c_str[1];
1104 enum { _S_copy_max = 23 };
1105 // For strings shorter than _S_copy_max, we copy to
1108 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1109 typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
1112 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1113 typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1116 // The only data member of a rope:
1117 _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
1120 allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
1123 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1124 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1125 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1126 typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1128 // Retrieve a character at the indicated position.
1129 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1131 // Obtain a pointer to the character at the indicated position.
1132 // The pointer can be used to change the character.
1133 // If such a pointer cannot be produced, as is frequently the
1134 // case, 0 is returned instead.
1135 // (Returns nonzero only if all nodes in the path have a refcount of 1.)
1137 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1139 static void _S_unref(_RopeRep* __t) {
1140 _RopeRep::_S_unref(__t);
1142 static void _S_ref(_RopeRep* __t) {
1143 _RopeRep::_S_ref(__t);
1146 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1148 // _Result is counted in refcount.
1149 static _RopeRep* _S_substring(_RopeRep* __base,
1150 size_t __start, size_t __endp1);
1152 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1153 const _CharT* __iter, size_t __slen);
1154 // Concatenate rope and char ptr, copying __s.
1155 // Should really take an arbitrary iterator.
1156 // Result is counted in refcount.
1157 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1158 const _CharT* __iter, size_t __slen);
1159 // As above, but one reference to __r is about to be
1160 // destroyed. Thus the pieces may be recycled if all
1161 // relevant reference counts are 1.
1163 // General concatenation on _RopeRep. Result
1164 // has refcount of 1. Adjusts argument refcounts.
1165 static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
1168 #if defined (_STLP_MEMBER_TEMPLATES)
1169 template <class _CharConsumer>
1171 typedef _Rope_char_consumer<_CharT> _CharConsumer;
1173 void apply_to_pieces(size_t __begin, size_t __end,
1174 _CharConsumer& __c) const
1175 { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
1179 static size_t _S_rounded_up_size(size_t __n)
1180 { return _RopeRep::_S_rounded_up_size(__n); }
1182 // Allocate and construct a RopeLeaf using the supplied allocator
1183 // Takes ownership of s instead of copying.
1184 static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
1185 size_t _p_size, allocator_type __a) {
1186 _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1187 _RopeLeaf).allocate(1);
1189 _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1191 _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1192 _RopeLeaf).deallocate(__space, 1))
1196 static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
1197 allocator_type __a) {
1198 _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1199 _RopeConcatenation).allocate(1);
1200 return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
1203 static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1204 size_t _p_size, bool __d, allocator_type __a) {
1205 _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1206 _RopeFunction).allocate(1);
1207 return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
1210 static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1211 size_t __l, allocator_type __a) {
1212 _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1213 _RopeSubstring).allocate(1);
1214 return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
1218 _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1219 size_t _p_size, allocator_type __a) {
1220 if (0 == _p_size) return 0;
1222 _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1224 _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
1225 _S_construct_null(__buf + _p_size);
1228 return _S_new_RopeLeaf(__buf, _p_size, __a);
1230 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1231 _STLP_RET_AFTER_THROW(0)
1235 // Concatenation of nonempty strings.
1236 // Always builds a concatenation node.
1237 // Rebalances if the result is too deep.
1238 // Result has refcount 1.
1239 // Does not increment left and right ref counts even though
1240 // they are referenced.
1242 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1244 // Concatenation helper functions
1246 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1247 const _CharT* __iter, size_t __slen);
1248 // Concatenate by copying leaf.
1249 // Should take an arbitrary iterator.
1250 // Result has refcount 1.
1251 static _RopeLeaf* _S_destr_leaf_concat_char_iter
1252 (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1253 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1256 // A helper function for exponentiating strings.
1257 // This uses a nonstandard refcount convention.
1258 // The result has refcount 0.
1259 typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1260 #if !defined (__GNUC__) || (__GNUC__ < 3)
1263 friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
1267 static size_t _S_char_ptr_len(const _CharT* __s) {
1268 return char_traits<_CharT>::length(__s);
1271 public: /* for operators */
1272 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1273 : _M_tree_ptr(__a, __t) { }
1275 // Copy __r to the _CharT buffer.
1276 // Returns __buffer + __r->_M_size._M_data.
1277 // Assumes that buffer is uninitialized.
1278 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1280 // Again, with explicit starting position and length.
1281 // Assumes that buffer is uninitialized.
1282 static _CharT* _S_flatten(_RopeRep* __r,
1283 size_t __start, size_t __len,
1286 // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
1288 static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
1290 static bool _S_is_balanced(_RopeRep* __r)
1291 { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1293 static bool _S_is_almost_balanced(_RopeRep* __r) {
1294 return (__r->_M_depth == 0 ||
1295 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
1298 static bool _S_is_roughly_balanced(_RopeRep* __r) {
1299 return (__r->_M_depth <= 1 ||
1300 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
1303 // Assumes the result is not empty.
1304 static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1305 _RopeRep* __right) {
1306 _RopeRep* __result = _S_concat_rep(__left, __right);
1307 if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1311 // The basic rebalancing operation. Logically copies the
1312 // rope. The result has refcount of 1. The client will
1313 // usually decrement the reference count of __r.
1314 // The result is within height 2 of balanced by the above definition.
1316 static _RopeRep* _S_balance(_RopeRep* __r);
1318 // Add all unbalanced subtrees to the forest of balanced trees.
1319 // Used only by balance.
1320 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1322 // Add __r to forest, assuming __r is already balanced.
1323 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1326 // Print to stdout, exposing structure
1327 static void _S_dump(_RopeRep* __r, int __indent = 0);
1330 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1331 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1333 void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
1335 void _M_reset(_RopeRep* __r) {
1336 //if (__r != _M_tree_ptr._M_data) {
1337 _S_unref(_M_tree_ptr._M_data);
1338 _M_tree_ptr._M_data = __r;
1343 bool empty() const { return 0 == _M_tree_ptr._M_data; }
1345 // Comparison member function. This is public only for those
1346 // clients that need a ternary comparison. Others
1347 // should use the comparison operators below.
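// For example (sketch): __x.compare(__y) < 0 indicates that __x sorts before __y.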
1348 int compare(const _Self& __y) const {
1349 return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1352 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1353 : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
1356 rope(const _CharT* __s, size_t __len,
1357 const allocator_type& __a = allocator_type())
1358 : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
1361 // Should perhaps be templatized with respect to the iterator type
1362 // and use Sequence_buffer. (It should perhaps use sequence_buffer even now.)
1364 rope(const _CharT *__s, const _CharT *__e,
1365 const allocator_type& __a = allocator_type())
1366 : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
1369 rope(const const_iterator& __s, const const_iterator& __e,
1370 const allocator_type& __a = allocator_type())
1371 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1372 __e._M_current_pos))
1375 rope(const iterator& __s, const iterator& __e,
1376 const allocator_type& __a = allocator_type())
1377 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1378 __e._M_current_pos))
1381 rope(_CharT __c, const allocator_type& __a = allocator_type())
1382 : _M_tree_ptr(__a, (_RopeRep*)0) {
1383 _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1385 _Copy_Construct(__buf, __c);
1386 _S_construct_null(__buf + 1);
1389 _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1391 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
1394 rope(size_t __n, _CharT __c,
1395 const allocator_type& __a = allocator_type()):
1396 _M_tree_ptr(__a, (_RopeRep*)0) {
1400 rope<_CharT,_Alloc> __result;
1401 # define __exponentiate_threshold size_t(32)
1402 _RopeRep* __remainder;
1403 rope<_CharT,_Alloc> __remainder_rope;
1406 typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1408 size_t __exponent = __n / __exponentiate_threshold;
1409 size_t __rest = __n % __exponentiate_threshold;
1413 _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1414 uninitialized_fill_n(__rest_buffer, __rest, __c);
1415 _S_construct_null(__rest_buffer + __rest);
1417 __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1419 _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1421 __remainder_rope._M_tree_ptr._M_data = __remainder;
1422 if (__exponent != 0) {
1423 _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1424 _RopeLeaf* __base_leaf;
1425 rope<_CharT,_Alloc> __base_rope;
1426 uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1427 _S_construct_null(__base_buffer + __exponentiate_threshold);
1429 __base_leaf = _S_new_RopeLeaf(__base_buffer,
1430 __exponentiate_threshold, __a);
1432 _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1433 __exponentiate_threshold, __a))
1434 __base_rope._M_tree_ptr._M_data = __base_leaf;
1435 if (1 == __exponent) {
1436 __result = __base_rope;
1437 // One each for base_rope and __result
1438 //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1440 __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
1442 if (0 != __remainder) {
1443 __result += __remainder_rope;
1446 __result = __remainder_rope;
1448 _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1449 _M_tree_ptr._M_data->_M_ref_nonnil();
1450 # undef __exponentiate_threshold
1453 rope(const allocator_type& __a = allocator_type())
1454 : _M_tree_ptr(__a, (_RopeRep*)0) {}
1456 // Construct a rope from a function that can compute its members
1457 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1458 const allocator_type& __a = allocator_type())
1459 : _M_tree_ptr(__a, (_RopeRep*)0) {
1460 _M_tree_ptr._M_data = (0 == __len) ?
1461 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1464 rope(const _Self& __x)
1465 : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
1466 _S_ref(_M_tree_ptr._M_data);
1469 rope(__move_source<_Self> __src)
1470 : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
1471 __src.get()._M_tree_ptr._M_data = 0;
1475 _S_unref(_M_tree_ptr._M_data);
1478 _Self& operator=(const _Self& __x) {
1479 _STLP_ASSERT(get_allocator() == __x.get_allocator())
1480 _S_ref(__x._M_tree_ptr._M_data);
1481 _M_reset(__x._M_tree_ptr._M_data);
1486 _S_unref(_M_tree_ptr._M_data);
1487 _M_tree_ptr._M_data = 0;
1489 void push_back(_CharT __x) {
1490 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
1494 _RopeRep* __old = _M_tree_ptr._M_data;
1495 _M_tree_ptr._M_data =
1496 _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1500 _CharT back() const {
1501 return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
1504 void push_front(_CharT __x) {
1505 _RopeRep* __old = _M_tree_ptr._M_data;
1507 _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
1509 _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1513 _STLP_UNWIND(_S_unref(__left))
1517 _RopeRep* __old = _M_tree_ptr._M_data;
1518 _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1522 _CharT front() const {
1523 return _S_fetch(_M_tree_ptr._M_data, 0);
1527 _RopeRep* __old = _M_tree_ptr._M_data;
1528 _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
1532 void copy(_CharT* __buffer) const {
1533 _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
1534 _S_flatten(_M_tree_ptr._M_data, __buffer);
1538 * This is the copy function from the standard, but
1539 * with the arguments reordered to make it consistent with the
1540 * rest of the interface.
1541 * Note that this is guaranteed not to compile if the draft standard order is assumed.
1544 size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
1545 size_t _p_size = size();
1546 size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1548 _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
1549 _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1554 // Print to stdout, exposing structure. May be useful for
1555 // performance debugging.
1557 _S_dump(_M_tree_ptr._M_data);
1561 // Convert to a 0-terminated string in newly allocated memory.
1562 // Embedded 0s in the input do not terminate the copy.
1563 const _CharT* c_str() const;
1565 // As above, but also use the flattened representation as the
1566 // new rope representation.
1567 const _CharT* replace_with_c_str();
1569 // Reclaim memory for the c_str generated flattened string.
1570 // Intentionally undocumented, since it's hard to say when this
1571 // is safe for multiple threads.
1572 void delete_c_str () {
1573 if (0 == _M_tree_ptr._M_data) return;
1574 if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1575 ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1576 _M_tree_ptr._M_data->_M_c_string) {
1577 // Representation shared
1580 _M_tree_ptr._M_data->_M_free_c_string();
1581 _M_tree_ptr._M_data->_M_c_string = 0;
1584 _CharT operator[] (size_type __pos) const {
1585 return _S_fetch(_M_tree_ptr._M_data, __pos);
1588 _CharT at(size_type __pos) const {
1589 if (__pos >= size()) _M_throw_out_of_range();
1590 return (*this)[__pos];
1593 const_iterator begin() const {
1594 return(const_iterator(_M_tree_ptr._M_data, 0));
1597 // An easy way to get a const iterator from a non-const container.
1598 const_iterator const_begin() const {
1599 return(const_iterator(_M_tree_ptr._M_data, 0));
1602 const_iterator end() const {
1603 return(const_iterator(_M_tree_ptr._M_data, size()));
1606 const_iterator const_end() const {
1607 return(const_iterator(_M_tree_ptr._M_data, size()));
1610 size_type size() const {
1611 return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1614 size_type length() const {
1618 size_type max_size() const {
1619 return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1620 // Guarantees that the result can be sufficiently
1621 // balanced. Longer ropes will probably still work,
1622 // but it's harder to make guarantees.
1625 const_reverse_iterator rbegin() const {
1626 return const_reverse_iterator(end());
1629 const_reverse_iterator const_rbegin() const {
1630 return const_reverse_iterator(end());
1633 const_reverse_iterator rend() const {
1634 return const_reverse_iterator(begin());
1637 const_reverse_iterator const_rend() const {
1638 return const_reverse_iterator(begin());
1640 // The symmetric cases are intentionally omitted, since they're presumed
1641 // to be less common, and we don't handle them as well.
1643 // The following should really be templatized.
1644 // The first argument should be an input iterator or
1645 // forward iterator with value_type _CharT.
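// Usage sketch (illustrative):
//
//   __r.append("tail");        // 0-terminated string; length found via char_traits
//   __r.append(__buf, __len);  // pointer + explicit length
//   __r.append(__other);       // links __other's tree in, sharing its nodes
//   __r.append('x');           // single character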
1646 _Self& append(const _CharT* __iter, size_t __n) {
1647 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
1651 _Self& append(const _CharT* __c_string) {
1652 size_t __len = _S_char_ptr_len(__c_string);
1653 append(__c_string, __len);
1657 _Self& append(const _CharT* __s, const _CharT* __e) {
1658 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
1662 _Self& append(const_iterator __s, const_iterator __e) {
1663 _STLP_ASSERT(__s._M_root == __e._M_root)
1664 _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1665 _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
1666 _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
1670 _Self& append(_CharT __c) {
1671 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
1675 _Self& append() { return append(_CharT()); } // XXX why?
1677 _Self& append(const _Self& __y) {
1678 _STLP_ASSERT(__y.get_allocator() == get_allocator())
1679 _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
1683 _Self& append(size_t __n, _CharT __c) {
1684 rope<_CharT,_Alloc> __last(__n, __c);
1685 return append(__last);
1688 void swap(_Self& __b) {
1689 _M_tree_ptr.swap(__b._M_tree_ptr);
1693 // Result is included in refcount.
1694 static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1695 size_t __pos2, _RopeRep* __r) {
1696 if (0 == __old) { _S_ref(__r); return __r; }
1697 _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
1698 _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
1699 _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1703 __result = _S_concat_rep(__left, __right);
1705 _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1706 _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1707 __result = _S_concat_rep(__left_result, __right);
1710 _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
1714 void insert(size_t __p, const _Self& __r) {
1715 if (__p > size()) _M_throw_out_of_range();
1716 _STLP_ASSERT(get_allocator() == __r.get_allocator())
1717 _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
1720 void insert(size_t __p, size_t __n, _CharT __c) {
1721 rope<_CharT,_Alloc> __r(__n,__c);
1725 void insert(size_t __p, const _CharT* __i, size_t __n) {
1726 if (__p > size()) _M_throw_out_of_range();
1727 _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1728 _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1729 _Self_destruct_ptr __left_result(
1730 _S_concat_char_iter(__left, __i, __n));
1731 // _S_destr_concat_char_iter should be safe here.
1732 // But as it stands it's probably not a win, since __left
1733 // is likely to have additional references.
1734 _M_reset(_S_concat_rep(__left_result, __right));
1737 void insert(size_t __p, const _CharT* __c_string) {
1738 insert(__p, __c_string, _S_char_ptr_len(__c_string));
1741 void insert(size_t __p, _CharT __c) {
1742 insert(__p, &__c, 1);
1745 void insert(size_t __p) {
1746 _CharT __c = _CharT();
1747 insert(__p, &__c, 1);
1750 void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1751 _Self __r(__i, __j);
1755 void insert(size_t __p, const const_iterator& __i,
1756 const const_iterator& __j) {
1757 _Self __r(__i, __j);
1761 void insert(size_t __p, const iterator& __i,
1762 const iterator& __j) {
1763 _Self __r(__i, __j);
1767 // (position, length) versions of replace operations:
1768 void replace(size_t __p, size_t __n, const _Self& __r) {
1769 if (__p > size()) _M_throw_out_of_range();
1770 _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
1773 void replace(size_t __p, size_t __n,
1774 const _CharT* __i, size_t __i_len) {
1775 _Self __r(__i, __i_len);
1776 replace(__p, __n, __r);
1779 void replace(size_t __p, size_t __n, _CharT __c) {
1781 replace(__p, __n, __r);
1784 void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1785 _Self __r(__c_string);
1786 replace(__p, __n, __r);
1789 void replace(size_t __p, size_t __n,
1790 const _CharT* __i, const _CharT* __j) {
1791 _Self __r(__i, __j);
1792 replace(__p, __n, __r);
1795 void replace(size_t __p, size_t __n,
1796 const const_iterator& __i, const const_iterator& __j) {
1797 _Self __r(__i, __j);
1798 replace(__p, __n, __r);
1801 void replace(size_t __p, size_t __n,
1802 const iterator& __i, const iterator& __j) {
1803 _Self __r(__i, __j);
1804 replace(__p, __n, __r);
1807 // Single character variants:
1808 void replace(size_t __p, _CharT __c) {
1809 if (__p > size()) _M_throw_out_of_range();
1810 iterator __i(this, __p);
1814 void replace(size_t __p, const _Self& __r) {
1815 replace(__p, 1, __r);
1818 void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1819 replace(__p, 1, __i, __i_len);
1822 void replace(size_t __p, const _CharT* __c_string) {
1823 replace(__p, 1, __c_string);
1826 void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1827 replace(__p, 1, __i, __j);
1830 void replace(size_t __p, const const_iterator& __i,
1831 const const_iterator& __j) {
1832 replace(__p, 1, __i, __j);
1835 void replace(size_t __p, const iterator& __i,
1836 const iterator& __j) {
1837 replace(__p, 1, __i, __j);
1840 // Erase, (position, size) variant.
1841 void erase(size_t __p, size_t __n) {
1842 if (__p > size()) _M_throw_out_of_range();
1843 _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
1846 // Erase, single character
1847 void erase(size_t __p) {
1848 erase(__p, 1);
  // Insert, iterator variants.
  iterator insert(const iterator& __p, const _Self& __r)
  { insert(__p.index(), __r); return __p; }
  iterator insert(const iterator& __p, size_t __n, _CharT __c)
  { insert(__p.index(), __n, __c); return __p; }
  iterator insert(const iterator& __p, _CharT __c)
  { insert(__p.index(), __c); return __p; }
  iterator insert(const iterator& __p)
  { insert(__p.index()); return __p; }
  iterator insert(const iterator& __p, const _CharT* c_string)
  { insert(__p.index(), c_string); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  { insert(__p.index(), __i, __n); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i,
                  const _CharT* __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const const_iterator& __i, const const_iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const iterator& __i, const iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  // Replace, range variants.
  void replace(const iterator& __p, const iterator& __q, const _Self& __r)
  { replace(__p.index(), __q.index() - __p.index(), __r); }
  void replace(const iterator& __p, const iterator& __q, _CharT __c)
  { replace(__p.index(), __q.index() - __p.index(), __c); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __c_string)
  { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, size_t __n)
  { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const const_iterator& __i, const const_iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const iterator& __i, const iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  // Replace, iterator variants.
  void replace(const iterator& __p, const _Self& __r)
  { replace(__p.index(), __r); }
  void replace(const iterator& __p, _CharT __c)
  { replace(__p.index(), __c); }
  void replace(const iterator& __p, const _CharT* __c_string)
  { replace(__p.index(), __c_string); }
  void replace(const iterator& __p, const _CharT* __i, size_t __n)
  { replace(__p.index(), __i, __n); }
  void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, const_iterator __i,
               const_iterator __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, iterator __i, iterator __j)
  { replace(__p.index(), __i, __j); }
  // Iterator and range variants of erase
  iterator erase(const iterator& __p, const iterator& __q) {
    size_t __p_index = __p.index();
    erase(__p_index, __q.index() - __p_index);
    return iterator(this, __p_index);
  }

  iterator erase(const iterator& __p) {
    size_t __p_index = __p.index();
    erase(__p_index, 1);
    return iterator(this, __p_index);
  }

  _Self substr(size_t __start, size_t __len = 1) const {
    if (__start > size()) _M_throw_out_of_range();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  }

  _Self substr(iterator __start, iterator __end) const {
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  _Self substr(iterator __start) const {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }

  _Self substr(const_iterator __start, const_iterator __end) const {
    // This might eventually take advantage of the cache in the
    // const_iterator.
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  rope<_CharT,_Alloc> substr(const_iterator __start) {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }
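  // Illustrative sketch (not part of the original header): substr copies no
  // characters, it builds a new rope over a shared substring node. The
  // contents in the trailing comments are the expected results of this
  // hypothetical snippet.
  //
  //   crope __r("concatenate");
  //   crope __head = __r.substr(0, 6);                       // "concat"
  //   crope __tail = __r.substr(__r.begin() + 6, __r.end()); // "enate"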
#include <stl/_string_npos.h>

  size_type find(const _Self& __s, size_type __pos = 0) const {
    if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
      return npos;
# else
      return size();
# endif

    size_type __result_pos;
    const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }

  size_type find(_CharT __c, size_type __pos = 0) const;
  size_type find(const _CharT* __s, size_type __pos = 0) const {
    size_type __result_pos;
    const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
                                     __s, __s + _S_char_ptr_len(__s));
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
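  // Illustrative sketch (not part of the original header): find returns npos
  // when the pattern is absent, unless _STLP_OLD_ROPE_SEMANTICS is defined,
  // in which case the old SGI convention of returning size() applies. The
  // values in the trailing comments assume the default (npos) semantics.
  //
  //   crope __r("abracadabra");
  //   crope::size_type __hit  = __r.find("cad");  // 4
  //   crope::size_type __miss = __r.find("xyz");  // crope::npos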
  iterator mutable_begin() {
    return(iterator(this, 0));
  }

  iterator mutable_end() {
    return(iterator(this, size()));
  }

  reverse_iterator mutable_rbegin() {
    return reverse_iterator(mutable_end());
  }

  reverse_iterator mutable_rend() {
    return reverse_iterator(mutable_begin());
  }

  reference mutable_reference_at(size_type __pos) {
    return reference(this, __pos);
  }

# ifdef __STD_STUFF
  reference operator[] (size_type __pos) {
    return reference(this, __pos);
  }

  reference at(size_type __pos) {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }
  void resize(size_type, _CharT) {}
  void resize(size_type) {}
  void reserve(size_type = 0) {}
  size_type capacity() const {
    return max_size();
  }

  // Stuff below this line is dangerous because it's error prone.
  // I would really like to get rid of it.
  // copy function with funny arg ordering.
  size_type copy(_CharT* __buffer, size_type __n,
                 size_type __pos = 0) const {
    return copy(__pos, __n, __buffer);
  }

  iterator end() { return mutable_end(); }
  iterator begin() { return mutable_begin(); }
  reverse_iterator rend() { return mutable_rend(); }
  reverse_iterator rbegin() { return mutable_rbegin(); }

# else /* __STD_STUFF */

  const_iterator end() { return const_end(); }
  const_iterator begin() { return const_begin(); }
  const_reverse_iterator rend() { return const_rend(); }
  const_reverse_iterator rbegin() { return const_rbegin(); }

# endif /* __STD_STUFF */
};
#if !defined (_STLP_STATIC_CONST_INIT_BUG)
#  if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#  endif
#endif

template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root == __y._M_root);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
#endif

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
#endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  __left.append(__right);
  return __left;
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
  __left.append(__right);
  return __left;
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  __left.append(__right);
  return __left;
}
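// Illustrative sketch (not part of the original header): concatenation never
// copies the operands' characters, it only builds a new concatenation node
// over the shared trees. The literals and results below are hypothetical.
//
//   crope __a("abra");
//   crope __b = __a + "cad" + __a;  // "abracadabra"
//   __b += '!';                     // "abracadabra!"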
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}
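// Illustrative sketch (not part of the original header): rope comparisons
// are lexicographic, expressed in terms of rope::compare.
//
//   crope __x("apple"), __y("apply");
//   bool __lt = __x < __y;   // true: "apple" precedes "apply"
//   bool __eq = __x == __x;  // true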
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}

#if !defined (_STLP_USE_NO_IOSTREAMS)
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
#endif

typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
#endif

inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }

#if defined (_STLP_HAS_WCHAR_T)
inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }
#endif
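// Illustrative sketch (not part of the original header): writing a single
// character through the proxy returned by mutable_reference_at; a mutating
// operator[] is only available when __STD_STUFF is defined. The literal and
// result below are hypothetical.
//
//   crope __r("cat");
//   __mutable_reference_at(__r, 0) = 'b';  // __r now reads "bat"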
#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
{ __x.swap(__y); }
#else

inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
# endif

#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
// Hash functions should probably be revisited later:
_STLP_TEMPLATE_NULL struct hash<crope> {
  size_t operator()(const crope& __str) const {
    size_t _p_size = __str.size();

    if (0 == _p_size) return 0;
    return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  }
};
#if defined (_STLP_HAS_WCHAR_T) // dwa 8/21/97
_STLP_TEMPLATE_NULL struct hash<wrope> {
  size_t operator()(const wrope& __str) const {
    size_t _p_size = __str.size();

    if (0 == _p_size) return 0;
    return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  }
};
#endif
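// Illustrative worked example (not part of the original header), assuming
// plain ASCII character values: for crope("abc"), size() == 3, __str[0] is
// 'a' (97) and __str[2] is 'c' (99), so hash<crope>()(crope("abc")) yields
// 13*97 + 5*99 + 3 == 1261 + 495 + 3 == 1759. Only the first character, the
// last character, and the length contribute to the hash.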
#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// I couldn't get this to work with VC++
template<class _CharT,class _Alloc>
# if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
extern
# endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);

inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
{ _Rope_rotate(__first, __middle, __last); }
#endif
template <class _CharT, class _Alloc>
inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
  if (_M_current_valid) {
    return _M_current;
  } else {
    return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
  }
}
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __stlp_movable implemented;
  //Completeness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif

_STLP_END_NAMESPACE

#if !defined (_STLP_LINK_TIME_INSTANTIATION)
#  include <stl/_rope.c>
#endif

#endif /* _STLP_INTERNAL_ROPE_H */