2 * © Portions copyright (c) 2006-2007 Nokia Corporation. All rights reserved.
3 * Copyright (c) 1996,1997
4 * Silicon Graphics Computer Systems, Inc.
7 * Moscow Center for SPARC Technology
12 * This material is provided "as is", with absolutely no warranty expressed
13 * or implied. Any use is at your own risk.
15 * Permission to use or copy this software for any purpose is hereby granted
16 * without fee, provided the above notices are retained on all copies.
17 * Permission to modify the code and to distribute modified code is granted,
18 * provided the above notices are retained, and a notice that the code was
19 * modified is included with the above copyright notice.
23 /* NOTE: This is an internal header file, included by other STL headers.
24 * You should not attempt to use it directly.
27 // rope<_CharT,_Alloc> is a sequence of _CharT.
28 // Ropes appear to be mutable, but update operations
29 // really copy enough of the data structure to leave the original
30 // valid. Thus ropes can be logically copied by just copying
33 #ifndef _STLP_INTERNAL_ROPE_H
34 # define _STLP_INTERNAL_ROPE_H
36 # ifndef _STLP_INTERNAL_ALGOBASE_H
37 # include <stl/_algobase.h>
44 # ifndef _STLP_INTERNAL_ALLOC_H
45 # include <stl/_alloc.h>
48 # ifndef _STLP_INTERNAL_ITERATOR_H
49 # include <stl/_iterator.h>
52 # ifndef _STLP_INTERNAL_ALGO_H
53 # include <stl/_algo.h>
56 # ifndef _STLP_INTERNAL_FUNCTION_H
57 # include <stl/_function.h>
60 # ifndef _STLP_INTERNAL_NUMERIC_H
61 # include <stl/_numeric.h>
64 # ifndef _STLP_INTERNAL_HASH_FUN_H
65 # include <stl/_hash_fun.h>
69 # define __GC_CONST const
71 # include <stl/_threads.h>
72 # define __GC_CONST // constant except for deallocation
74 # ifdef _STLP_SGI_THREADS
78 #ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
79 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
80 #elif defined(__MRC__)||defined(__SC__)
81 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
83 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
88 // First a lot of forward declarations. The standard seems to require
89 // much stricter "declaration before use" than many of the implementations
91 template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
92 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
93 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
94 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
95 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
96 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
97 template<class _CharT, class _Alloc> class _Rope_iterator;
98 template<class _CharT, class _Alloc> class _Rope_const_iterator;
99 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
100 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
102 // Some helpers, so we can use power on ropes.
103 // See below for why this isn't local to the implementation.
105 // This uses a nonstandard refcount convention.
106 // The result has refcount 0.
// Concatenation functor for ropes, so generic power()-style algorithms
// can combine ropes.
// NOTE(review): this region of the file is truncated — the operator()
// body and part of __identity_element are not visible here.
template<class _CharT, class _Alloc>
struct _Rope_Concat_fn
  : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
                           rope<_CharT,_Alloc> > {
  rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
                                  const rope<_CharT,_Alloc>& __y) {
// Identity element for rope concatenation: the empty rope.
template <class _CharT, class _Alloc>
__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
  return rope<_CharT,_Alloc>();
125 // The _S_eos function is used for those functions that
126 // convert to/from C-like strings to detect the end of the string.
128 // The end-of-C-string character.
129 // This is what the draft standard says it should be.
// Generic end-of-C-string sentinel: a value-initialized _CharT
// (this is what the draft standard says it should be).
template <class _CharT>
inline _CharT _S_eos(_CharT*)
{
  _CharT __nul = _CharT();
  return __nul;
}
133 // fbp : some compilers fail to zero-initialize builtins ;(
134 inline const char _S_eos(const char*) { return 0; }
135 # ifdef _STLP_HAS_WCHAR_T
136 inline const wchar_t _S_eos(const wchar_t*) { return 0; }
139 // Test for basic character types.
140 // For basic character types leaves having a trailing eos.
// Generic fallbacks: an arbitrary _CharT is neither treated as a basic
// (eos-terminated) character type nor as a one-byte character type.
template <class _CharT>
inline bool _S_is_basic_char_type(_CharT*)
{ return false; }

template <class _CharT>
inline bool _S_is_one_byte_char_type(_CharT*)
{ return false; }
// char is both a basic character type (leaves carry a trailing eos)
// and a one-byte character type.
inline bool _S_is_basic_char_type(char*)
{ return true; }
inline bool _S_is_one_byte_char_type(char*)
{ return true; }
148 # ifdef _STLP_HAS_WCHAR_T
149 inline bool _S_is_basic_char_type(wchar_t*) { return true; }
152 // Store an eos iff _CharT is a basic character type.
153 // Do not reference _S_eos if it isn't.
// Conditionally store an end-of-string marker: a no-op for general
// character types (so _S_eos need not even be referenced for them),
// writes a zero for the basic builtin character types.
template <class _CharT>
inline void _S_cond_store_eos(_CharT&)
{ /* not a basic character type: nothing to store */ }

inline void _S_cond_store_eos(char& __c)
{ __c = '\0'; }
158 # ifdef _STLP_HAS_WCHAR_T
159 inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
162 // char_producers are logically functions that generate a section of
// a string. These can be converted to ropes. The resulting rope
164 // invokes the char_producer on demand. This allows, for example,
165 // files to be viewed as ropes without reading the entire file.
// Abstract on-demand generator of characters: a rope built from a
// char_producer invokes operator() lazily to materialize sections of
// the string (e.g. so a file can be viewed as a rope without reading
// it all in).
// NOTE(review): the access specifier and closing brace fall in a
// truncated part of this file.
template <class _CharT>
class char_producer {
  virtual ~char_producer() {};
  // Fill __buffer with __len characters of the sequence, starting at
  // position __start_pos.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
180 // Sequence must provide an append operation that appends an
181 // array to the sequence. Sequence buffers are useful only if
182 // appending an entire array is cheaper than appending element by element.
183 // This is true for many string representations.
184 // This should perhaps inherit from ostream<sequence::value_type>
185 // and be implemented correspondingly, so that they can be used
186 // for formatted. For the sake of portability, we don't do this yet.
188 // For now, sequence buffers behave as output iterators. But they also
189 // behave a little like basic_ostringstream<sequence::value_type> and a
190 // little like containers.
// sequence_buffer: an output-iterator-style adaptor that batches
// appends to an underlying _Sequence through a small local buffer of
// _Buf_sz elements, flushing to _Sequence::append in bulk.
// NOTE(review): this region is truncated — flush()'s signature, the
// _M_buf_count declaration, and parts of several member bodies are not
// visible here.
template<class _Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
         , size_t _Buf_sz = 100
#  if defined(__sgi) && !defined(__GNUC__)
#   define __TYPEDEF_WORKAROUND
         ,class _V = typename _Sequence::value_type
#  endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
// The 3rd parameter works around a common compiler bug.
class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
# ifndef __TYPEDEF_WORKAROUND
  typedef typename _Sequence::value_type value_type;
  typedef sequence_buffer<_Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  enum { _Buf_sz = 100};
# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
# else /* __TYPEDEF_WORKAROUND */
  typedef _V value_type;
  typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
# endif /* __TYPEDEF_WORKAROUND */
  _Sequence* _M_prefix;           // sequence being appended to (not owned)
  value_type _M_buffer[_Buf_sz];  // pending elements, not yet appended
  _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
  ~sequence_buffer() { flush(); } // push buffered elements out first
  sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
  sequence_buffer(const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  sequence_buffer(_Self& __x) {
    _M_prefix = __x._M_prefix;
  sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
  _Self& operator= (_Self& __x) {
    _M_prefix = __x._M_prefix;
  _Self& operator= (const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  // Append a single element, buffering until _Buf_sz is reached.
  void push_back(value_type __x)
    if (_M_buf_count < _Buf_sz) {
      _M_buffer[_M_buf_count] = __x;
  // Append __len elements: copied into the local buffer when they fit,
  // forwarded straight to the sequence when the buffer is empty.
  void append(value_type* __s, size_t __len)
    if (__len + _M_buf_count <= _Buf_sz) {
      size_t __i = _M_buf_count;
      for (; __j < __len; __i++, __j++) {
        _M_buffer[__i] = __s[__j];
      _M_buf_count += __len;
    } else if (0 == _M_buf_count) {
      _M_prefix->append(__s, __s + __len);
  _Self& write(value_type* __s, size_t __len)
  _Self& put(value_type __x)
  // Output-iterator protocol: assignment appends; * and ++ are no-ops.
  _Self& operator=(const value_type& __rhs)
  _Self& operator*() { return *this; }
  _Self& operator++() { return *this; }
  _Self& operator++(int) { return *this; }
302 // The following should be treated as private, at least for now.
// Abstract sink of characters: rope traversal feeds successive
// buffers to operator().  The bool return presumably signals whether
// to continue the traversal — TODO confirm against callers, which are
// outside this view.
template<class _CharT>
class _Rope_char_consumer {
  // If we had member templates, these should not be virtual.
  // For now we need to use run-time parametrization where
  // compile-time would do. Hence this should all be private
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {};
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
316 // What follows should really be local to rope. Unfortunately,
317 // that doesn't work, since it makes it impossible to define generic
318 // equality on rope iterators. According to the draft standard, the
319 // template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
321 // (SGI compilers in fact allow this, but the __result wouldn't be
323 // Similarly, some of the static member functions are member functions
324 // only to avoid polluting the global namespace, and to circumvent
325 // restrictions on type inference for template functions.
329 // The internal data structure for representing a rope. This is
330 // private to the implementation. A rope is really just a pointer
333 // A few basic functions for manipulating this data structure
334 // are members of _RopeRep. Most of the more complex algorithms
335 // are implemented as rope members.
337 // Some of the static member functions of _RopeRep have identically
338 // named functions in rope that simply invoke the _RopeRep versions.
340 // A macro to introduce various allocation and deallocation functions
341 // These need to be defined differently depending on whether or not
342 // we are using standard conforming allocators, and whether the allocator
343 // instances have real state. Thus this macro is invoked repeatedly
344 // with different definitions of __ROPE_DEFINE_ALLOC.
#if defined (_STLP_MEMBER_TEMPLATE_CLASSES)
// Declares a per-type allocator member for one node type.
# define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) \
  _Alloc_traits<_Tp,_Alloc>::allocator_type __name##Allocator;

// Instantiates __ROPE_DEFINE_ALLOC for the character data and each
// rope node flavor (concatenation, leaf, function, substring).
#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy) \
  __ROPE_DEFINE_ALLOC(_CharT,_Data, _M_proxy) /* character data */ \
  typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
  __ROPE_DEFINE_ALLOC(__C,_C, _M_proxy) \
  typedef _Rope_RopeLeaf<_CharT,__a> __L; \
  __ROPE_DEFINE_ALLOC(__L,_L, _M_proxy) \
  typedef _Rope_RopeFunction<_CharT,__a> __F; \
  __ROPE_DEFINE_ALLOC(__F,_F, _M_proxy) \
  typedef _Rope_RopeSubstring<_CharT,__a> __S; \
  __ROPE_DEFINE_ALLOC(__S,_S,_M_proxy)

// Other case: the macros expand to nothing (no per-type allocator
// members needed).  NOTE(review): the matching #else/#endif lines are
// in a truncated part of this file.
#define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy)
#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy)
// _Rope_RopeRep: the reference-counted tree node underlying a rope.
// A rope is essentially a pointer to one of these; the concrete node
// flavors (leaf, concatenation, function, substring) derive from it.
// NOTE(review): the struct header line and several member bodies fall
// in truncated parts of this file.
template<class _CharT, class _Alloc>
    : public _Refcount_Base
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
# define __ROPE_MAX_DEPTH 45
# define __ROPE_DEPTH_SIZE 46
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };
  bool _M_is_balanced:8;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type
  allocator_type get_allocator() const { return allocator_type(_M_size); }
  // Depth of the tree rooted here: 0 for leaves, max(children)+1 for
  // concatenation nodes (see _Rope_RopeConcatenation's ctor).
  unsigned char _M_depth;
  __GC_CONST _CharT* _M_c_string;
  _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
  /* Flattened version of string, if needed. */
  /* If it's not 0, then the memory is owned */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field. */
  _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
  // fbp : moved from RopeLeaf
  // Round a leaf buffer size up: +1 slot for the eos of basic char
  // types, then up to the allocation granularity for in-place growth.
  static size_t _S_rounded_up_size(size_t __n) {
    size_t __size_with_eos;
    if (_S_is_basic_char_type((_CharT*)0)) {
      __size_with_eos = __n + 1;
      __size_with_eos = __n;
    return __size_with_eos;
    // Allow slop for in-place expansion.
    return (__size_with_eos + _S_alloc_granularity-1)
           &~ (_S_alloc_granularity-1);
  // Destroy (for non-basic char types) and deallocate a character
  // buffer of __len elements allocated via _S_rounded_up_size.
  static void _S_free_string(__GC_CONST _CharT* __s, size_t __len,
                             allocator_type __a) {
    if (!_S_is_basic_char_type((_CharT*)0)) {
      _STLP_STD::_Destroy(__s, __s + __len);
    // This has to be a static member, so this gets a bit messy
# ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
    __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  // Deallocate t. Assumes t is not 0.
  // Drop one reference; free the whole tree when the count hits 0.
  void _M_unref_nonnil()
    _M_decr(); if (!_M_ref_count) _M_free_tree();
  static void _S_unref(_Self* __t)
      __t->_M_unref_nonnil();
  static void _S_ref(_Self* __t)
    if (0 != __t) __t->_M_incr();
  static void _S_free_if_unref(_Self* __t)
    if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  // Variants used when reference counting is disabled: all no-ops.
  void _M_unref_nonnil() {}
  void _M_ref_nonnil() {}
  static void _S_unref(_Self*) {}
  static void _S_ref(_Self*) {}
  static void _S_free_if_unref(_Self*) {}
  __ROPE_DEFINE_ALLOCS(_Alloc, _M_size)
// Leaf node: owns a flat character buffer of _M_size._M_data
// characters (padded per _S_rounded_up_size).
// NOTE(review): parts of the constructor and destructor bodies fall
// in truncated parts of this file.
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
  __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
                              /* The allocated size is */
                              /* _S_rounded_up_size(size), except */
                              /* in the GC case, in which it */
                              /* doesn't matter. */
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
  _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_leaf, 0, true, _p_size, __a),
    _STLP_ASSERT(_p_size > 0)
    if (_S_is_basic_char_type((_CharT *)0)) {
      // already eos terminated.
      this->_M_c_string = __d;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
// Interior node: the concatenation of the _M_left and _M_right
// subtrees.  Its size is the sum of the children's sizes and its
// depth is one more than the deeper child; freshly built
// concatenations are marked unbalanced.
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
  _Rope_RopeRep<_CharT,_Alloc>* _M_left;
  _Rope_RopeRep<_CharT,_Alloc>* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
  _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
                          _Rope_RopeRep<_CharT,_Alloc>* __r,
    : _Rope_RopeRep<_CharT,_Alloc>(
        _Rope_RopeRep<_CharT,_Alloc>::_S_concat,
        (max)(__l->_M_depth, __r->_M_depth) + 1, false,
        __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
  // Drop our references to both children on destruction.
  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
// Node whose characters are produced on demand by a char_producer.
template<class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
  char_producer<_CharT>* _M_fn;
  bool _M_delete_when_done; // Char_producer is owned by the
                            // rope and should be explicitly
                            // deleted when the rope becomes
  // In the GC case, we either register the rope for
  // finalization, or not. Thus the field is unnecessary;
  // the information is stored in the collector data structures.
  // We do need a finalization procedure to be invoked by the
  // GC-finalizer hook: deletes the owned producer.
  static void _S_fn_finalization_proc(void * __tree, void *) {
    delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
  // __f produces the characters; __d says whether this node owns (and
  // eventually deletes) the producer.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_function, 0, true, _p_size, __a),
    , _M_delete_when_done(__d)
    _STLP_ASSERT(_p_size > 0)
    GC_REGISTER_FINALIZER(
      this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
615 // Substring results are usually represented using just
616 // concatenation nodes. But in the case of very long flat ropes
617 // or ropes with a functional representation that isn't practical.
618 // In that case, we represent the __result as a special case of
619 // RopeFunction, whose char_producer points back to the rope itself.
620 // In all cases except repeated substring operations and
621 // deallocation, we treat the __result as a RopeFunction.
// Substring node: used for substrings of very long flat ropes or of
// functional ropes, where building concatenation nodes isn't
// practical.  It is a RopeFunction whose char_producer points back
// into the base rope.
template<class _CharT, class _Alloc>
# if ( defined (__IBMCPP__) && (__IBMCPP__ == 500) ) // JFA 10-Aug-2000 for some reason xlC cares about the order
struct _Rope_RopeSubstring : public char_producer<_CharT> , public _Rope_RopeFunction<_CharT,_Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
                             public char_producer<_CharT>
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _Base;
  _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
  // Produce __req_len characters starting at __start_pos, delegating
  // to the base node (a function/substring node or a leaf).
  virtual void operator()(size_t __start_pos, size_t __req_len,
    switch(_M_base->_M_tag) {
    case _Base::_S_function:
    case _Base::_S_substringfn:
        char_producer<_CharT>* __fn =
          ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
        __GC_CONST _CharT* __s =
          ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
        uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
  // Represent __l characters of __b starting at offset __s; takes a
  // reference on the base node.
  _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                      size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _Base::_S_substringfn;
  virtual ~_Rope_RopeSubstring()
    _M_base->_M_unref_nonnil();
687 // Self-destructing pointers to Rope_rep.
688 // These are not conventional smart pointers. Their
689 // only purpose in life is to ensure that unref is called
690 // on the pointer either at normal exit or if an exception
691 // is raised. It is the caller's responsibility to
692 // adjust reference counts when these pointers are initialized
693 // or assigned to. (This convention significantly reduces
694 // the number of potentially expensive reference count
// Self-destructing pointer to a _RopeRep: its only job is to
// guarantee _S_unref is called on the pointee at scope exit, whether
// by normal flow or exception.  Not a conventional smart pointer —
// callers adjust reference counts themselves when initializing or
// assigning.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
# ifdef _STLP_USE_EXCEPTIONS
  _Rope_self_destruct_ptr() : _M_ptr(0) {};
  _Rope_self_destruct_ptr() {};
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
717 // Dereferencing a nonconst iterator has to return something
718 // that behaves almost like a reference. It's not possible to
719 // return an actual reference since assignment requires extra
720 // work. And we would get into the same problems as with the
721 // CD2 version of basic_string.
// Proxy returned when dereferencing a nonconst rope iterator.  It
// behaves almost like a _CharT&: reads convert to _CharT, writes go
// through operator=(_CharT) (a true reference is impossible since
// rope updates copy part of the structure).
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  bool _M_current_valid;        // whether _M_current caches the char
  _My_rope* _M_root;            // The whole rope.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression. We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference. We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
    _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  inline operator _CharT () const;
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
759 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// swap for char-reference proxies; the proxies are taken by value
// since they are themselves reference-like.
// NOTE(review): the body continues in a truncated part of this file.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
768 // There is no really acceptable way to handle this. The default
769 // definition of swap doesn't work for proxy references.
770 // It can't really be made to work, even with ugly hacks, since
771 // the only unusual operation it uses is the copy constructor, which
772 // is needed for other purposes. We provide a macro for
773 // full specializations, and instantiate the most common case.
// Generates a full specialization of swap for one proxy
// instantiation (macro body continues in a truncated part of this
// file); immediately instantiated for the common char case below.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
  inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                   _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
    _CharT __tmp = __a; \
_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
784 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
// Proxy for a pointer into a rope, produced by taking the address of
// a _Rope_char_ref_proxy.
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
  // XXX this class should be rewritten.
  typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  rope<_CharT,_Alloc>* _M_root;     // The whole rope.
  _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy(const _Self& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy() {}
  // Only a null _CharT* may be converted (asserted here).
  _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    _STLP_ASSERT(0 == __x)
  operator= (const _Self& __x) {
    _M_root = __x._M_root;
  _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
817 // Unlike in the C version, we cache only part of the stack
818 // for rope iterators, since they must be efficiently copyable.
819 // When we run out of cache, we have to reconstruct the iterator
821 // Pointers from iterators are not included in reference counts.
822 // Iterators are assumed to be thread private. Ropes can
// Common state for rope iterators.  Only part of the root-to-leaf
// path is cached (so iterators stay cheap to copy); when the cache
// runs out the iterator is reconstructed.  Pointers held here are not
// included in reference counts, and iterators are assumed to be
// thread-private.
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/*   : public random_access_iterator<_CharT, ptrdiff_t>  */
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // Borland doesn't want this to be protected.
  enum { _S_path_cache_len = 4 }; // Must be <= 9.
  enum { _S_iterator_buf_len = 15 };
  size_t _M_current_pos;
  _RopeRep* _M_root;     // The whole rope.
  size_t _M_leaf_pos;    // Starting position for current leaf
  __GC_CONST _CharT* _M_buf_start;
                         // containing current char.
  __GC_CONST _CharT* _M_buf_ptr;
                         // Pointer to current char in buffer.
                         // != 0 ==> buffer valid.
  __GC_CONST _CharT* _M_buf_end;
                         // One past __last valid char in buffer.
  // What follows is the path cache. We go out of our
  // way to make this compact.
  // Path_end contains the bottom section of the path from
  // the root to the current leaf.
  const _RopeRep* _M_path_end[_S_path_cache_len];
  int _M_leaf_index;     // Last valid __pos in path_end;
                         // _M_path_end[0] ... _M_path_end[leaf_index-1]
                         // point to concatenation nodes.
  unsigned char _M_path_directions;
                         // (path_directions >> __i) & 1 is 1
                         // iff we got from _M_path_end[leaf_index - __i - 1]
                         // to _M_path_end[leaf_index - __i] by going to the
                         // __right. Assumes path_cache_len <= 9.
  _CharT _M_tmp_buf[_S_iterator_buf_len];
                         // Short buffer for surrounding chars.
                         // This is useful primarily for
                         // RopeFunctions. We put the buffer
                         // here to avoid locking in the
                         // multithreaded case.
  // The cached path is generally assumed to be valid
  // only if the buffer is valid.
  static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
                         // Set buffer contents given
  static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
                         // Set buffer contents and
  static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
                         // As above, but assumes path
                         // cache is valid for previous posn.
  _Rope_iterator_base() {}
  // Constructed with an invalid (null) buffer pointer; the cache is
  // filled lazily on first dereference.
  _Rope_iterator_base(_RopeRep* __root, size_t __pos)
    : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
  void _M_incr(size_t __n);
  void _M_decr(size_t __n);
  size_t index() const { return _M_current_pos; }
  _Rope_iterator_base(const _Self& __x) {
    if (0 != __x._M_buf_ptr) {
    _M_current_pos = __x._M_current_pos;
    _M_root = __x._M_root;
896 template<class _CharT, class _Alloc> class _Rope_iterator;
// Const iterator over a rope.  Does not touch the root's reference
// count — only nonconst iterators do.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
# ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(
      __CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  typedef _CharT reference;   // Really a value. Returning a reference
                              // Would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
  _Rope_const_iterator() {};
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  // Copy the buffer cache only when the source's cache is valid;
  // otherwise just copy position and root and invalidate the cache.
  _Self& operator= (const _Self& __x) {
    if (0 != __x._M_buf_ptr) {
      *(__STATIC_CAST(_Base*,this)) = __x;
      this->_M_current_pos = __x._M_current_pos;
      this->_M_root = __x._M_root;
      this->_M_buf_ptr = 0;
  // Dereference fills the cache lazily on first use.
  reference operator*() {
    if (0 == this->_M_buf_ptr) _S_setcache(*this);
    return *(this->_M_buf_ptr);
  _Self& operator++() {
    __GC_CONST _CharT* __next;
    // Fast path: the next character is already in the cached buffer.
    if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
      this->_M_buf_ptr = __next;
      ++this->_M_current_pos;
  _Self& operator+=(ptrdiff_t __n) {
  _Self& operator--() {
  _Self& operator-=(ptrdiff_t __n) {
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  inline reference operator[](size_t __n);
// Mutable iterator over a rope.  Keeps a reference-counted pointer to
// the root so that changes to the underlying rope can be detected
// reliably.
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
  typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
  ~_Rope_iterator() //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
    this->_M_root = 0; // Needed for reference counting.
  _Rope_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  // Assignment refs the new root before unref'ing the old one, which
  // is safe even when both are the same node.
  _Self& operator= (const _Self& __x) {
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    if (0 != __x._M_buf_ptr) {
      _M_root_rope = __x._M_root_rope;
      *(__STATIC_CAST(_Base*,this)) = __x;
      this->_M_current_pos = __x._M_current_pos;
      this->_M_root = __x._M_root;
      _M_root_rope = __x._M_root_rope;
      this->_M_buf_ptr = 0;
    _RopeRep::_S_unref(__old);
  // Dereference yields a char-reference proxy, not a plain reference;
  // the cached character is forwarded when the buffer is valid.
  reference operator*() {
    if (0 == this->_M_buf_ptr) {
      return _Rope_char_ref_proxy<_CharT,_Alloc>(
        _M_root_rope, this->_M_current_pos);
      return _Rope_char_ref_proxy<_CharT,_Alloc>(
        _M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
  _Self& operator++() {
  _Self& operator+=(ptrdiff_t __n) {
      this->_M_decr(-__n);
  _Self& operator--() {
  _Self& operator-=(ptrdiff_t __n) {
      this->_M_incr(-__n);
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
  reference operator[](ptrdiff_t __n) {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(
      _M_root_rope, this->_M_current_pos + __n);
// Pre-standard (old HP STL) iterator-trait query functions.  Compiled only
// when _STLP_USE_OLD_HP_ITERATOR_QUERIES is defined; modern configurations
// use iterator_traits instead.  The pointer returns encode the value/distance
// types; the null value is never dereferenced.
1098 # ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1099 template <class _CharT, class _Alloc>
1100 inline random_access_iterator_tag
1101 iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
1102 template <class _CharT, class _Alloc>
1103 inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1104 template <class _CharT, class _Alloc>
1105 inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1106 template <class _CharT, class _Alloc>
1107 inline random_access_iterator_tag
1108 iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
1109 template <class _CharT, class _Alloc>
1110 inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1111 template <class _CharT, class _Alloc>
1112 inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
// rope<_CharT,_Alloc>: container typedefs, friends, and the single data
// member.  (The class-head line itself is elided from this listing.)
1115 template <class _CharT, class _Alloc>
1117 typedef rope<_CharT,_Alloc> _Self;
1119 typedef _CharT value_type;
1120 typedef ptrdiff_t difference_type;
1121 typedef size_t size_type;
// const access returns characters by value; mutable access goes through
// proxies so writes can trigger copy-on-write.
1122 typedef _CharT const_reference;
1123 typedef const _CharT* const_pointer;
1124 typedef _Rope_iterator<_CharT,_Alloc> iterator;
1125 typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1126 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1127 typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1129 friend class _Rope_iterator<_CharT,_Alloc>;
1130 friend class _Rope_const_iterator<_CharT,_Alloc>;
1131 friend struct _Rope_RopeRep<_CharT,_Alloc>;
1132 friend class _Rope_iterator_base<_CharT,_Alloc>;
1133 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1134 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1135 friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1137 _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1140 typedef __GC_CONST _CharT* _Cstrptr;
1142 static _CharT _S_empty_c_str[1];
// True iff __c is the end-of-sequence character for _CharT.
1144 static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1145 enum { _S_copy_max = 23 };
1146 // For strings shorter than _S_copy_max, we copy to
1150 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1151 _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1152 typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1153 allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
1155 // The only data member of a rope:
// _M_tree_ptr._M_data is the root of the (possibly shared) rope tree, or 0
// for an empty rope; the proxy also stores the allocator (EBO-style).
1156 _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
// Concrete tree-node types and the low-level fetch / traversal / refcount
// helpers.  Code byte-identical; comments only.
1158 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1159 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1160 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1161 typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1165 // Retrieve a character at the indicated position.
1166 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1169 // Obtain a pointer to the character at the indicated position.
1170 // The pointer can be used to change the character.
1171 // If such a pointer cannot be produced, as is frequently the
1172 // case, 0 is returned instead.
1173 // (Returns nonzero only if all nodes in the path have a refcount
1175 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
// Walk the pieces of __r in [__begin, __end) and feed each to consumer __c.
1178 static bool _S_apply_to_pieces(
1179 // should be template parameter
1180 _Rope_char_consumer<_CharT>& __c,
1181 const _RopeRep* __r,
1182 size_t __begin, size_t __end);
1183 // begin and end are assumed to be in range.
// Refcount wrappers; the empty overloads below are the garbage-collected
// (__GC) build where reference counting is unnecessary.
1186 static void _S_unref(_RopeRep* __t)
1188 _RopeRep::_S_unref(__t);
1190 static void _S_ref(_RopeRep* __t)
1192 _RopeRep::_S_ref(__t);
1195 static void _S_unref(_RopeRep*) {}
1196 static void _S_ref(_RopeRep*) {}
// _Self_destruct_ptr: plain pointer under GC, RAII unref-on-destruction
// smart pointer otherwise.
1201 typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1203 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1206 // _Result is counted in refcount.
1207 static _RopeRep* _S_substring(_RopeRep* __base,
1208 size_t __start, size_t __endp1);
1210 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1211 const _CharT* __iter, size_t __slen);
1212 // Concatenate rope and char ptr, copying __s.
1213 // Should really take an arbitrary iterator.
1214 // Result is counted in refcount.
1215 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1216 const _CharT* __iter, size_t __slen)
1217 // As above, but one reference to __r is about to be
1218 // destroyed. Thus the pieces may be recycled if all
1219 // relevant reference counts are 1.
1221 // We can't really do anything since refcounts are unavailable.
1222 { return _S_concat_char_iter(__r, __iter, __slen); }
1227 static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
1228 // General concatenation on _RopeRep. _Result
1229 // has refcount of 1. Adjusts argument refcounts.
1232 void apply_to_pieces( size_t __begin, size_t __end,
1233 _Rope_char_consumer<_CharT>& __c) const {
1234 _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end);
// Node-construction helpers.  Each _S_new_* allocates raw storage with the
// (possibly per-instance) allocator and placement-constructs the node;
// _STLP_UNWIND deallocates on exception.  Code byte-identical; comments only.
1240 static size_t _S_rounded_up_size(size_t __n) {
1241 return _RopeRep::_S_rounded_up_size(__n);
// Usable capacity of a leaf buffer of rounded size __n; basic char types
// reserve one slot for the terminating eos character.
1244 static size_t _S_allocated_capacity(size_t __n) {
1245 if (_S_is_basic_char_type((_CharT*)0)) {
1246 return _S_rounded_up_size(__n) - 1;
1248 return _S_rounded_up_size(__n);
1252 // Allocate and construct a RopeLeaf using the supplied allocator
1253 // Takes ownership of s instead of copying.
1254 static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1255 size_t _p_size, allocator_type __a)
1257 _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _RopeLeaf).allocate(1,(const void*)0);
1259 _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1261 _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1262 _RopeLeaf).deallocate(__space, 1))
1266 static _RopeConcatenation* _S_new_RopeConcatenation(
1267 _RopeRep* __left, _RopeRep* __right,
1270 _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1271 _RopeConcatenation).allocate(1,(const void*)0);
1272 return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
1275 static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1276 size_t _p_size, bool __d, allocator_type __a)
1278 _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1279 _RopeFunction).allocate(1,(const void*)0);
1280 return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
1283 static _RopeSubstring* _S_new_RopeSubstring(
1284 _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1285 size_t __l, allocator_type __a)
1287 _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1288 _RopeSubstring).allocate(1,(const void*)0);
1289 return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
1292 # define _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _p_size, __a) \
1293 _S_RopeLeaf_from_unowned_char_ptr(__s, _p_size, __a)
// Build a leaf from a caller-owned buffer: copies __s, conditionally stores
// an eos terminator, frees the copy if leaf construction throws.
1296 _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1297 size_t _p_size, allocator_type __a)
1299 if (0 == _p_size) return 0;
1301 _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1303 uninitialized_copy_n(__s, _p_size, __buf);
1304 _S_cond_store_eos(__buf[_p_size]);
1307 return _S_new_RopeLeaf(__buf, _p_size, __a);
1309 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1311 # if defined (_STLP_THROW_RETURN_BUG)
1317 // Concatenation of nonempty strings.
1318 // Always builds a concatenation node.
1319 // Rebalances if the result is too deep.
1320 // Result has refcount 1.
1321 // Does not increment left and right ref counts even though
1322 // they are referenced.
1324 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1326 // Concatenation helper functions
1328 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1329 const _CharT* __iter, size_t __slen);
1330 // Concatenate by copying leaf.
1331 // should take an arbitrary iterator
1332 // result has refcount 1.
1334 static _RopeLeaf* _S_destr_leaf_concat_char_iter
1335 (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1336 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1340 // A helper function for exponentiating strings.
1341 // This uses a nonstandard refcount convention.
1342 // The result has refcount 0.
1343 friend struct _Rope_Concat_fn<_CharT,_Alloc>;
1344 typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
// strlen-equivalent for arbitrary _CharT, using _S_is0 as terminator test.
1347 static size_t _S_char_ptr_len(const _CharT* __s) {
1348 const _CharT* __p = __s;
1350 while (!_S_is0(*__p)) { ++__p; }
// Balance predicates, three-way compare, and the simple constructors.
// Code byte-identical; comments only.
1354 public: /* for operators */
// Adopts __t without taking an extra reference (used by internal operators).
1355 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1356 : _M_tree_ptr(__a, __t) { }
1358 // Copy __r to the _CharT buffer.
1359 // Returns __buffer + __r->_M_size._M_data.
1360 // Assumes that buffer is uninitialized.
1361 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1363 // Again, with explicit starting position and length.
1364 // Assumes that buffer is uninitialized.
1365 static _CharT* _S_flatten(_RopeRep* __r,
1366 size_t __start, size_t __len,
1369 // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
// _S_min_len[d] = minimum length a depth-d tree must have to count as
// balanced (Fibonacci-style bound).
1371 static const unsigned long _S_min_len[46];
1373 static bool _S_is_balanced(_RopeRep* __r)
1374 { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1376 static bool _S_is_almost_balanced(_RopeRep* __r)
1377 { return (__r->_M_depth == 0 ||
1378 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]); }
1380 static bool _S_is_roughly_balanced(_RopeRep* __r)
1381 { return (__r->_M_depth <= 1 ||
1382 __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]); }
1384 // Assumes the result is not empty.
1385 static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1388 _RopeRep* __result = _S_concat_rep(__left, __right);
1389 if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1393 // The basic rebalancing operation. Logically copies the
1394 // rope. The result has refcount of 1. The client will
1395 // usually decrement the reference count of __r.
1396 // The result is within height 2 of balanced by the above
1398 static _RopeRep* _S_balance(_RopeRep* __r);
1400 // Add all unbalanced subtrees to the forest of balanced trees.
1401 // Used only by balance.
1402 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1404 // Add __r to forest, assuming __r is already balanced.
1405 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1407 // Print to stdout, exposing structure
1408 static void _S_dump(_RopeRep* __r, int __indent = 0);
1410 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1411 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1414 bool empty() const { return 0 == _M_tree_ptr._M_data; }
1416 // Comparison member function. This is public only for those
1417 // clients that need a ternary comparison. Others
1418 // should use the comparison operators below.
1419 int compare(const _Self& __y) const {
1420 return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
// Constructors from C strings / pointer ranges all copy their input into a
// freshly built leaf via _STLP_ROPE_FROM_UNOWNED_CHAR_PTR.
1423 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1424 : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),__a))
1427 rope(const _CharT* __s, size_t __len,
1428 const allocator_type& __a = allocator_type())
1429 : _M_tree_ptr(__a, (_STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a)))
1432 // Should perhaps be templatized with respect to the iterator type
1433 // and use Sequence_buffer. (It should perhaps use sequence_buffer
1435 rope(const _CharT *__s, const _CharT *__e,
1436 const allocator_type& __a = allocator_type())
1437 : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a))
// Rope-iterator range constructors share structure via _S_substring instead
// of copying characters.
1440 rope(const const_iterator& __s, const const_iterator& __e,
1441 const allocator_type& __a = allocator_type())
1442 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1443 __e._M_current_pos))
1446 rope(const iterator& __s, const iterator& __e,
1447 const allocator_type& __a = allocator_type())
1448 : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1449 __e._M_current_pos))
// Single-character rope: one-element leaf, buffer freed on exception.
1452 rope(_CharT __c, const allocator_type& __a = allocator_type())
1453 : _M_tree_ptr(__a, (_RopeRep*)0)
1455 _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1457 _Construct(__buf, __c);
1459 _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1461 _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
// Fill constructor rope(n, c): builds the remainder (n mod 32) as one leaf
// and the bulk as power(base_32_char_rope, n / 32) using the rope concat
// functor — O(log n) tree construction instead of one giant buffer.
// Refcount bookkeeping here is delicate; code left byte-identical.
1464 rope(size_t __n, _CharT __c,
1465 const allocator_type& __a = allocator_type()):
1466 _M_tree_ptr(__a, (_RopeRep*)0) {
1467 rope<_CharT,_Alloc> __result;
1468 # define __exponentiate_threshold size_t(32)
1469 _RopeRep* __remainder;
1470 rope<_CharT,_Alloc> __remainder_rope;
1473 typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1478 size_t __exponent = __n / __exponentiate_threshold;
1479 size_t __rest = __n % __exponentiate_threshold;
// Leaf holding the n-mod-threshold leftover characters.
1483 _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1484 uninitialized_fill_n(__rest_buffer, __rest, __c);
1485 _S_cond_store_eos(__rest_buffer[__rest]);
1487 __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1489 _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1491 __remainder_rope._M_tree_ptr._M_data = __remainder;
1492 if (__exponent != 0) {
// Base leaf of exactly __exponentiate_threshold copies of __c.
1493 _CharT* __base_buffer =
1494 _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1495 _RopeLeaf* __base_leaf;
1496 rope<_CharT,_Alloc> __base_rope;
1497 uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1498 _S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
1500 __base_leaf = _S_new_RopeLeaf(__base_buffer,
1501 __exponentiate_threshold, __a);
1503 _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1504 __exponentiate_threshold, __a))
1505 __base_rope._M_tree_ptr._M_data = __base_leaf;
1506 if (1 == __exponent) {
1507 __result = __base_rope;
1509 _STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1510 // One each for base_rope and __result
// power() repeatedly concatenates the base rope with itself (logarithmic
// number of shared concatenation nodes).
1513 __result = power(__base_rope, __exponent, _Concat_fn());
1515 if (0 != __remainder) {
1516 __result += __remainder_rope;
1519 __result = __remainder_rope;
// Steal __result's tree and add this rope's own reference to it.
1521 _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1522 _M_tree_ptr._M_data->_M_ref_nonnil();
1523 # undef __exponentiate_threshold
// Default constructor: the empty rope is represented by a null tree.
1526 rope(const allocator_type& __a = allocator_type())
1527 : _M_tree_ptr(__a, (_RopeRep*)0) {}
// Lazy/function ropes, copy semantics, and deque-like front/back operations.
// Updates follow the pattern: build the new tree first, then unref the old
// root (held in __old) — elided lines between the visible ones do the unref.
1529 // Construct a rope from a function that can compute its members
1530 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1531 const allocator_type& __a = allocator_type())
1532 : _M_tree_ptr(__a, (_RopeRep*)0)
1534 _M_tree_ptr._M_data = (0 == __len) ?
1535 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
// Copy constructor: O(1) — shares the tree and bumps its refcount.
1538 rope(const _Self& __x)
1539 : _M_tree_ptr(__x.get_allocator(), __x._M_tree_ptr._M_data)
1541 _S_ref(_M_tree_ptr._M_data);
1546 _S_unref(_M_tree_ptr._M_data);
// Assignment: ref the source tree before releasing the old one.
1549 _Self& operator=(const _Self& __x)
1551 _RopeRep* __old = _M_tree_ptr._M_data;
1552 _STLP_ASSERT(get_allocator() == __x.get_allocator())
1553 _M_tree_ptr._M_data = __x._M_tree_ptr._M_data;
1554 _S_ref(_M_tree_ptr._M_data);
// clear(): drop the tree, leaving the null (empty) representation.
1560 _S_unref(_M_tree_ptr._M_data);
1561 _M_tree_ptr._M_data = 0;
1563 void push_back(_CharT __x)
1565 _RopeRep* __old = _M_tree_ptr._M_data;
1566 _M_tree_ptr._M_data = _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1);
// pop_back: rebuild as the substring [0, size-1).
1572 _RopeRep* __old = _M_tree_ptr._M_data;
1573 _M_tree_ptr._M_data =
1574 _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1580 return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
// push_front: build a one-char leaf and concatenate it on the left.
1583 void push_front(_CharT __x)
1585 _RopeRep* __old = _M_tree_ptr._M_data;
1587 _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1589 _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1593 _STLP_UNWIND(_S_unref(__left))
// pop_front: rebuild as the substring [1, size).
1598 _RopeRep* __old = _M_tree_ptr._M_data;
1599 _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1603 _CharT front() const
1605 return _S_fetch(_M_tree_ptr._M_data, 0);
// balance(): replace the tree with a rebalanced logical copy.
1610 _RopeRep* __old = _M_tree_ptr._M_data;
1611 _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
// copy(): flatten the whole rope into caller-supplied storage.
1615 void copy(_CharT* __buffer) const {
1616 _STLP_STD::_Destroy(__buffer, __buffer + size());
1617 _S_flatten(_M_tree_ptr._M_data, __buffer);
1620 // This is the copy function from the standard, but
1621 // with the arguments reordered to make it consistent with the
1622 // rest of the interface.
1623 // Note that this is guaranteed not to compile if the draft standard
1624 // order is assumed.
1625 size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
// Clamp the request to the characters actually available past __pos.
1627 size_t _p_size = size();
1628 size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1630 _STLP_STD::_Destroy(__buffer, __buffer + __len);
1631 _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1635 // Print to stdout, exposing structure. May be useful for
1636 // performance debugging.
1638 _S_dump(_M_tree_ptr._M_data);
1641 // Convert to 0 terminated string in new allocated memory.
1642 // Embedded 0s in the input do not terminate the copy.
1643 const _CharT* c_str() const;
1645 // As above, but also use the flattened representation as
1646 // the new rope representation.
1647 const _CharT* replace_with_c_str();
1649 // Reclaim memory for the c_str generated flattened string.
1650 // Intentionally undocumented, since it's hard to say when this
1651 // is safe for multiple threads.
1652 void delete_c_str () {
1653 if (0 == _M_tree_ptr._M_data) return;
// If the root is a leaf whose character buffer doubles as the cached
// C string, the storage is shared and must not be freed here.
1654 if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1655 ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1656 _M_tree_ptr._M_data->_M_c_string) {
1657 // Representation shared
1661 _M_tree_ptr._M_data->_M_free_c_string();
1663 _M_tree_ptr._M_data->_M_c_string = 0;
// Read-only element access returns the character by value.
1666 _CharT operator[] (size_type __pos) const {
1667 return _S_fetch(_M_tree_ptr._M_data, __pos);
1670 _CharT at(size_type __pos) const {
1671 // if (__pos >= size()) throw out_of_range; // XXX
1672 return (*this)[__pos];
1675 const_iterator begin() const {
1676 return(const_iterator(_M_tree_ptr._M_data, 0));
1679 // An easy way to get a const iterator from a non-const container.
1680 const_iterator const_begin() const {
1681 return(const_iterator(_M_tree_ptr._M_data, 0));
1684 const_iterator end() const {
1685 return(const_iterator(_M_tree_ptr._M_data, size()));
1688 const_iterator const_end() const {
1689 return(const_iterator(_M_tree_ptr._M_data, size()));
// A null tree means the empty rope, so size() must test for it.
1692 size_type size() const {
1693 return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1696 size_type length() const {
1700 size_type max_size() const {
1701 return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1702 // Guarantees that the result can be sufficiently
1703 // balanced. Longer ropes will probably still work,
1704 // but it's harder to make guarantees.
// Reverse iterators, the append() overload family, and swap().  All append
// forms follow the same scheme: build the new tree, unref the old root,
// install the result.  Code byte-identical; comments only.
1707 const_reverse_iterator rbegin() const {
1708 return const_reverse_iterator(end());
1711 const_reverse_iterator const_rbegin() const {
1712 return const_reverse_iterator(end());
1715 const_reverse_iterator rend() const {
1716 return const_reverse_iterator(begin());
1719 const_reverse_iterator const_rend() const {
1720 return const_reverse_iterator(begin());
1722 // The symmetric cases are intentionally omitted, since they're presumed
1723 // to be less common, and we don't handle them as well.
1725 // The following should really be templatized.
1726 // The first argument should be an input iterator or
1727 // forward iterator with value_type _CharT.
1728 _Self& append(const _CharT* __iter, size_t __n) {
1729 _RopeRep* __result =
1730 _S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n);
1731 _S_unref(_M_tree_ptr._M_data);
1732 _M_tree_ptr._M_data = __result;
1736 _Self& append(const _CharT* __c_string) {
1737 size_t __len = _S_char_ptr_len(__c_string);
1738 append(__c_string, __len);
1742 _Self& append(const _CharT* __s, const _CharT* __e) {
1743 _RopeRep* __result =
1744 _S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s);
1745 _S_unref(_M_tree_ptr._M_data);
1746 _M_tree_ptr._M_data = __result;
// Appending a rope-iterator range shares structure via _S_substring rather
// than copying characters.
1750 _Self& append(const_iterator __s, const_iterator __e) {
1751 _STLP_ASSERT(__s._M_root == __e._M_root)
1752 _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1753 _Self_destruct_ptr __appendee(_S_substring(
1754 __s._M_root, __s._M_current_pos, __e._M_current_pos));
1755 _RopeRep* __result =
1756 _S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee);
1757 _S_unref(_M_tree_ptr._M_data);
1758 _M_tree_ptr._M_data = __result;
1762 _Self& append(_CharT __c) {
1763 _RopeRep* __result =
1764 _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1);
1765 _S_unref(_M_tree_ptr._M_data);
1766 _M_tree_ptr._M_data = __result;
1770 _Self& append() { return append(_CharT()); } // XXX why?
1772 _Self& append(const _Self& __y) {
1773 _STLP_ASSERT(__y.get_allocator() == get_allocator())
1774 _RopeRep* __result = _S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1775 _S_unref(_M_tree_ptr._M_data);
1776 _M_tree_ptr._M_data = __result;
1780 _Self& append(size_t __n, _CharT __c) {
1781 rope<_CharT,_Alloc> __last(__n, __c);
1782 return append(__last);
// O(1) swap: exchanges only the root pointers.
1785 void swap(_Self& __b) {
1786 _STLP_ASSERT(get_allocator() == __b.get_allocator())
1787 _RopeRep* __tmp = _M_tree_ptr._M_data;
1788 _M_tree_ptr._M_data = __b._M_tree_ptr._M_data;
1789 __b._M_tree_ptr._M_data = __tmp;
// The static replace() workhorse (splice __r in place of [__pos1,__pos2) of
// __old) and the position-based insert() overloads built on it.
1794 // Result is included in refcount.
1795 static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1796 size_t __pos2, _RopeRep* __r) {
1797 if (0 == __old) { _S_ref(__r); return __r; }
// Split __old into the pieces before and after the replaced range; the
// _Self_destruct_ptr wrappers release them even on exception.
1798 _Self_destruct_ptr __left(
1799 _S_substring(__old, 0, __pos1));
1800 _Self_destruct_ptr __right(
1801 _S_substring(__old, __pos2, __old->_M_size._M_data));
1802 _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1806 __result = _S_concat_rep(__left, __right);
1808 _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1809 _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1810 __result = _S_concat_rep(__left_result, __right);
1813 _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
// insert at __p == replace of the empty range [__p, __p).
1817 void insert(size_t __p, const _Self& __r) {
1818 _RopeRep* __result =
1819 replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data);
1820 _STLP_ASSERT(get_allocator() == __r.get_allocator())
1821 _S_unref(_M_tree_ptr._M_data);
1822 _M_tree_ptr._M_data = __result;
1825 void insert(size_t __p, size_t __n, _CharT __c) {
1826 rope<_CharT,_Alloc> __r(__n,__c);
1830 void insert(size_t __p, const _CharT* __i, size_t __n) {
1831 _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1832 _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1833 _Self_destruct_ptr __left_result(
1834 _S_concat_char_iter(__left, __i, __n));
1835 // _S_ destr_concat_char_iter should be safe here.
1836 // But as it stands it's probably not a win, since __left
1837 // is likely to have additional references.
1838 _RopeRep* __result = _S_concat_rep(__left_result, __right);
1839 _S_unref(_M_tree_ptr._M_data);
1840 _M_tree_ptr._M_data = __result;
1843 void insert(size_t __p, const _CharT* __c_string) {
1844 insert(__p, __c_string, _S_char_ptr_len(__c_string));
1847 void insert(size_t __p, _CharT __c) {
1848 insert(__p, &__c, 1);
// insert(__p) inserts a single value-initialized character.
1851 void insert(size_t __p) {
1852 _CharT __c = _CharT();
1853 insert(__p, &__c, 1);
// Range inserts first materialize the range as a temporary rope.
1856 void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1857 _Self __r(__i, __j);
1861 void insert(size_t __p, const const_iterator& __i,
1862 const const_iterator& __j) {
1863 _Self __r(__i, __j);
1867 void insert(size_t __p, const iterator& __i,
1868 const iterator& __j) {
1869 _Self __r(__i, __j);
// (position, length) replace() overloads and range erase().  The single-
// character replace variants delegate to the (p, 1, ...) forms.
1873 // (position, length) versions of replace operations:
1875 void replace(size_t __p, size_t __n, const _Self& __r) {
1876 _RopeRep* __result =
1877 replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data);
1878 _S_unref(_M_tree_ptr._M_data);
1879 _M_tree_ptr._M_data = __result;
1882 void replace(size_t __p, size_t __n,
1883 const _CharT* __i, size_t __i_len) {
1884 _Self __r(__i, __i_len);
1885 replace(__p, __n, __r);
1888 void replace(size_t __p, size_t __n, _CharT __c) {
1890 replace(__p, __n, __r);
1893 void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1894 _Self __r(__c_string);
1895 replace(__p, __n, __r);
1898 void replace(size_t __p, size_t __n,
1899 const _CharT* __i, const _CharT* __j) {
1900 _Self __r(__i, __j);
1901 replace(__p, __n, __r);
1904 void replace(size_t __p, size_t __n,
1905 const const_iterator& __i, const const_iterator& __j) {
1906 _Self __r(__i, __j);
1907 replace(__p, __n, __r);
1910 void replace(size_t __p, size_t __n,
1911 const iterator& __i, const iterator& __j) {
1912 _Self __r(__i, __j);
1913 replace(__p, __n, __r);
1916 // Single character variants:
// Writes through a mutable iterator's proxy reference (elided lines assign
// __c through __i).
1917 void replace(size_t __p, _CharT __c) {
1918 iterator __i(this, __p);
1922 void replace(size_t __p, const _Self& __r) {
1923 replace(__p, 1, __r);
1926 void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1927 replace(__p, 1, __i, __i_len);
1930 void replace(size_t __p, const _CharT* __c_string) {
1931 replace(__p, 1, __c_string);
1934 void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1935 replace(__p, 1, __i, __j);
1938 void replace(size_t __p, const const_iterator& __i,
1939 const const_iterator& __j) {
1940 replace(__p, 1, __i, __j);
1943 void replace(size_t __p, const iterator& __i,
1944 const iterator& __j) {
1945 replace(__p, 1, __i, __j);
1948 // Erase, (position, size) variant.
// Erasing == replacing [__p, __p + __n) with the null (empty) tree.
1949 void erase(size_t __p, size_t __n) {
1950 _RopeRep* __result = replace(_M_tree_ptr._M_data, __p, __p + __n, 0);
1951 _S_unref(_M_tree_ptr._M_data);
1952 _M_tree_ptr._M_data = __result;
1955 // Erase, single character
// FIX(review): erase(size_t, size_t) takes (position, COUNT) — see the
// "(position, size) variant" comment above — so the old call
// erase(__p, __p + 1) removed __p + 1 characters instead of one.
// The iterator variant below correctly uses erase(__p_index, 1);
// this now matches it and removes exactly the character at __p.
1956 void erase(size_t __p) {
1957 erase(__p, 1);
// Iterator-position forms of insert/replace/erase.  Each converts the
// iterator(s) to positions via index() and forwards to the position-based
// overload; inserts return the (by-value) position iterator __p unchanged.
1960 // Insert, iterator variants.
1961 iterator insert(const iterator& __p, const _Self& __r)
1962 { insert(__p.index(), __r); return __p; }
1963 iterator insert(const iterator& __p, size_t __n, _CharT __c)
1964 { insert(__p.index(), __n, __c); return __p; }
1965 iterator insert(const iterator& __p, _CharT __c)
1966 { insert(__p.index(), __c); return __p; }
1967 iterator insert(const iterator& __p )
1968 { insert(__p.index()); return __p; }
1969 iterator insert(const iterator& __p, const _CharT* c_string)
1970 { insert(__p.index(), c_string); return __p; }
1971 iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1972 { insert(__p.index(), __i, __n); return __p; }
1973 iterator insert(const iterator& __p, const _CharT* __i,
1975 { insert(__p.index(), __i, __j); return __p; }
1976 iterator insert(const iterator& __p,
1977 const const_iterator& __i, const const_iterator& __j)
1978 { insert(__p.index(), __i, __j); return __p; }
1979 iterator insert(const iterator& __p,
1980 const iterator& __i, const iterator& __j)
1981 { insert(__p.index(), __i, __j); return __p; }
1983 // Replace, range variants.
// [__p, __q) names the range being replaced.
1984 void replace(const iterator& __p, const iterator& __q,
1986 { replace(__p.index(), __q.index() - __p.index(), __r); }
1987 void replace(const iterator& __p, const iterator& __q, _CharT __c)
1988 { replace(__p.index(), __q.index() - __p.index(), __c); }
1989 void replace(const iterator& __p, const iterator& __q,
1990 const _CharT* __c_string)
1991 { replace(__p.index(), __q.index() - __p.index(), __c_string); }
1992 void replace(const iterator& __p, const iterator& __q,
1993 const _CharT* __i, size_t __n)
1994 { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
1995 void replace(const iterator& __p, const iterator& __q,
1996 const _CharT* __i, const _CharT* __j)
1997 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1998 void replace(const iterator& __p, const iterator& __q,
1999 const const_iterator& __i, const const_iterator& __j)
2000 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2001 void replace(const iterator& __p, const iterator& __q,
2002 const iterator& __i, const iterator& __j)
2003 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2005 // Replace, iterator variants.
2006 void replace(const iterator& __p, const _Self& __r)
2007 { replace(__p.index(), __r); }
2008 void replace(const iterator& __p, _CharT __c)
2009 { replace(__p.index(), __c); }
2010 void replace(const iterator& __p, const _CharT* __c_string)
2011 { replace(__p.index(), __c_string); }
2012 void replace(const iterator& __p, const _CharT* __i, size_t __n)
2013 { replace(__p.index(), __i, __n); }
2014 void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
2015 { replace(__p.index(), __i, __j); }
2016 void replace(const iterator& __p, const_iterator __i,
2018 { replace(__p.index(), __i, __j); }
2019 void replace(const iterator& __p, iterator __i, iterator __j)
2020 { replace(__p.index(), __i, __j); }
2022 // Iterator and range variants of erase
// Both return a fresh iterator at the erase position, since the original
// iterators are invalidated by the tree rebuild.
2023 iterator erase(const iterator& __p, const iterator& __q) {
2024 size_t __p_index = __p.index();
2025 erase(__p_index, __q.index() - __p_index);
2026 return iterator(this, __p_index);
2028 iterator erase(const iterator& __p) {
2029 size_t __p_index = __p.index();
2030 erase(__p_index, 1);
2031 return iterator(this, __p_index);
// substr family (all share structure via _S_substring — O(log n), no
// character copying), find, mutable iterator accessors, and the no-op
// capacity interface kept for basic_string compatibility.
2034 _Self substr(size_t __start, size_t __len = 1) const {
2035 return rope<_CharT,_Alloc>(
2036 _S_substring(_M_tree_ptr._M_data, __start, __start + __len));
2039 _Self substr(iterator __start, iterator __end) const {
2040 return rope<_CharT,_Alloc>(
2041 _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
// Single-iterator form: one-character substring at that position.
2044 _Self substr(iterator __start) const {
2045 size_t __pos = __start.index();
2046 return rope<_CharT,_Alloc>(
2047 _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
2050 _Self substr(const_iterator __start, const_iterator __end) const {
2051 // This might eventually take advantage of the cache in the
2053 return rope<_CharT,_Alloc>(
2054 _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()))
2057 rope<_CharT,_Alloc> substr(const_iterator __start) {
2058 size_t __pos = __start.index();
2059 return rope<_CharT,_Alloc>(
2060 _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
2065 // static const size_type npos;
2067 size_type find(_CharT __c, size_type __pos = 0) const;
// Substring search via std::search over const iterators; by default a
// failed search maps the end position to npos (old-rope semantics kept the
// end position instead).
2068 size_type find(const _CharT* __s, size_type __pos = 0) const {
2069 size_type __result_pos;
2070 const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
2071 __s, __s + _S_char_ptr_len(__s));
2072 __result_pos = __result.index();
2073 # ifndef _STLP_OLD_ROPE_SEMANTICS
2074 if (__result_pos == size()) __result_pos = npos;
2076 return __result_pos;
2079 iterator mutable_begin() {
2080 return(iterator(this, 0));
2083 iterator mutable_end() {
2084 return(iterator(this, size()));
2087 reverse_iterator mutable_rbegin() {
2088 return reverse_iterator(mutable_end());
2091 reverse_iterator mutable_rend() {
2092 return reverse_iterator(mutable_begin());
2095 reference mutable_reference_at(size_type __pos) {
2096 return reference(this, __pos);
// Mutable element access returns a write-through proxy, not _CharT&.
2100 reference operator[] (size_type __pos) {
2101 return reference(this, __pos);
2104 reference at(size_type __pos) {
2105 // if (__pos >= size()) throw out_of_range; // XXX
2106 return (*this)[__pos];
// Capacity interface is intentionally a no-op: ropes need no reservation.
2109 void resize(size_type, _CharT) {}
2110 void resize(size_type) {}
2111 void reserve(size_type = 0) {}
2112 size_type capacity() const {
2116 // Stuff below this line is dangerous because it's error prone.
2117 // I would really like to get rid of it.
2118 // copy function with funny arg ordering.
2119 size_type copy(_CharT* __buffer, size_type __n,
2120 size_type __pos = 0) const {
2121 return copy(__pos, __n, __buffer);
2124 iterator end() { return mutable_end(); }
2126 iterator begin() { return mutable_begin(); }
2128 reverse_iterator rend() { return mutable_rend(); }
2130 reverse_iterator rbegin() { return mutable_rbegin(); }
2134 const_iterator end() { return const_end(); }
2136 const_iterator begin() { return const_begin(); }
2138 const_reverse_iterator rend() { return const_rend(); }
2140 const_reverse_iterator rbegin() { return const_rbegin(); }
2144 __ROPE_DEFINE_ALLOCS(_Alloc, _M_tree_ptr)
2147 # undef __ROPE_DEFINE_ALLOC
2148 # undef __ROPE_DEFINE_ALLOCS
// Random access through a const iterator: fetch the character __n
// positions past the current one without advancing the iterator.
2150 template <class _CharT, class _Alloc>
2152 _Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
2154 return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n);
// Equality: same position AND same underlying tree root.
2157 template <class _CharT, class _Alloc>
2158 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2159 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2160 return (__x._M_current_pos == __y._M_current_pos &&
2161 __x._M_root == __y._M_root);
// Ordering compares positions only; comparing iterators into
// different ropes is not meaningful (roots are not checked).
2164 template <class _CharT, class _Alloc>
2165 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2166 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2167 return (__x._M_current_pos < __y._M_current_pos);
2170 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
// Derived comparisons for const iterators, expressed in terms of
// == and < in the canonical rel_ops fashion.  Only compiled when the
// generic std::rel_ops templates are not usable.
2172 template <class _CharT, class _Alloc>
2173 inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2174 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2175 return !(__x == __y);
2178 template <class _CharT, class _Alloc>
2179 inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2180 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2184 template <class _CharT, class _Alloc>
2185 inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2186 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2187 return !(__y < __x);
2190 template <class _CharT, class _Alloc>
2191 inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2192 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2193 return !(__x < __y);
2196 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
// Iterator difference: signed distance between two positions.  The
// casts to ptrdiff_t keep the subtraction signed even though the
// stored positions are unsigned.
2198 template <class _CharT, class _Alloc>
2199 inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2200 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2201 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2204 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
// Iterator arithmetic: each form constructs a fresh iterator over the
// same root, offset by __n.  iter - n:
2205 template <class _CharT, class _Alloc>
2206 inline _Rope_const_iterator<_CharT,_Alloc>
2207 operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2208 return _Rope_const_iterator<_CharT,_Alloc>(
2209 __x._M_root, __x._M_current_pos - __n);
// iter + n:
2213 template <class _CharT, class _Alloc>
2214 inline _Rope_const_iterator<_CharT,_Alloc>
2215 operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2216 return _Rope_const_iterator<_CharT,_Alloc>(
2217 __x._M_root, __x._M_current_pos + __n);
// n + iter (commuted form required of random-access iterators):
2220 template <class _CharT, class _Alloc>
2221 inline _Rope_const_iterator<_CharT,_Alloc>
2222 operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
2223 return _Rope_const_iterator<_CharT,_Alloc>(
2224 __x._M_root, __x._M_current_pos + __n);
// Mutable-iterator equality: same position AND same owning rope
// (note: compares _M_root_rope, unlike the const iterator which
// compares the tree root pointer).
2227 template <class _CharT, class _Alloc>
2228 inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2229 const _Rope_iterator<_CharT,_Alloc>& __y) {
2230 return (__x._M_current_pos == __y._M_current_pos &&
2231 __x._M_root_rope == __y._M_root_rope);
// Ordering compares positions only.
2234 template <class _CharT, class _Alloc>
2235 inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2236 const _Rope_iterator<_CharT,_Alloc>& __y) {
2237 return (__x._M_current_pos < __y._M_current_pos);
2240 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
// Derived comparisons for mutable iterators, mirroring the const
// iterator set above; compiled only without generic rel_ops.
2242 template <class _CharT, class _Alloc>
2243 inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
2244 const _Rope_iterator<_CharT,_Alloc>& __y) {
2245 return !(__x == __y);
2248 template <class _CharT, class _Alloc>
2249 inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
2250 const _Rope_iterator<_CharT,_Alloc>& __y) {
2254 template <class _CharT, class _Alloc>
2255 inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
2256 const _Rope_iterator<_CharT,_Alloc>& __y) {
2257 return !(__y < __x);
2260 template <class _CharT, class _Alloc>
2261 inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
2262 const _Rope_iterator<_CharT,_Alloc>& __y) {
2263 return !(__x < __y);
2266 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
// Signed distance between two mutable iterators (casts keep the
// subtraction signed, as for the const iterator version).
2268 template <class _CharT, class _Alloc>
2269 inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2270 const _Rope_iterator<_CharT,_Alloc>& __y) {
2271 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2274 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
// Mutable-iterator arithmetic; each form rebuilds an iterator bound
// to the same rope object at the shifted position.  iter - n:
2275 template <class _CharT, class _Alloc>
2276 inline _Rope_iterator<_CharT,_Alloc>
2277 operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2279 return _Rope_iterator<_CharT,_Alloc>(
2280 __x._M_root_rope, __x._M_current_pos - __n);
// iter + n:
2284 template <class _CharT, class _Alloc>
2285 inline _Rope_iterator<_CharT,_Alloc>
2286 operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2288 return _Rope_iterator<_CharT,_Alloc>(
2289 __x._M_root_rope, __x._M_current_pos + __n);
// n + iter:
2292 template <class _CharT, class _Alloc>
2293 inline _Rope_iterator<_CharT,_Alloc>
2294 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2295 return _Rope_iterator<_CharT,_Alloc>(
2296 __x._M_root_rope, __x._M_current_pos + __n);
// Concatenation operators.  Rope + rope concatenates the two trees
// without copying character data (_S_concat_rep builds a concat node).
2299 template <class _CharT, class _Alloc>
2302 operator+ (const rope<_CharT,_Alloc>& __left,
2303 const rope<_CharT,_Alloc>& __right)
// Both operands must use the same allocator for the trees to be
// safely merged.
2305 _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
2306 return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
2307 // Inlining this should make it possible to keep __left and
2308 // __right in registers.
// rope += rope appends in place.
2311 template <class _CharT, class _Alloc>
2313 rope<_CharT,_Alloc>&
2314 operator+= (rope<_CharT,_Alloc>& __left,
2315 const rope<_CharT,_Alloc>& __right)
2317 __left.append(__right);
// rope + C-string: length computed once, then concatenated via the
// char-iterator path.
2321 template <class _CharT, class _Alloc>
2324 operator+ (const rope<_CharT,_Alloc>& __left,
2325 const _CharT* __right) {
2326 size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2327 return rope<_CharT,_Alloc>(
2328 rope<_CharT,_Alloc>::_S_concat_char_iter(
2329 __left._M_tree_ptr._M_data, __right, __rlen));
2332 template <class _CharT, class _Alloc>
2334 rope<_CharT,_Alloc>&
2335 operator+= (rope<_CharT,_Alloc>& __left,
2336 const _CharT* __right) {
2337 __left.append(__right);
// rope + single character (passed by value; address taken to reuse
// the char-iterator concat with length 1).
2341 template <class _CharT, class _Alloc>
2344 operator+ (const rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
2345 return rope<_CharT,_Alloc>(
2346 rope<_CharT,_Alloc>::_S_concat_char_iter(
2347 __left._M_tree_ptr._M_data, &__right, 1));
2350 template <class _CharT, class _Alloc>
2352 rope<_CharT,_Alloc>&
2353 operator+= (rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
2354 __left.append(__right);
// Lexicographic comparison of whole ropes, delegated to
// rope::compare (three-way result).
2358 template <class _CharT, class _Alloc>
2360 operator< (const rope<_CharT,_Alloc>& __left,
2361 const rope<_CharT,_Alloc>& __right) {
2362 return __left.compare(__right) < 0;
2365 template <class _CharT, class _Alloc>
2367 operator== (const rope<_CharT,_Alloc>& __left,
2368 const rope<_CharT,_Alloc>& __right) {
2369 return __left.compare(__right) == 0;
2372 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
// Derived comparisons for ropes, built on == and < above; compiled
// only when generic rel_ops are unavailable.
2374 template <class _CharT, class _Alloc>
2376 operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2377 return !(__x == __y);
2380 template <class _CharT, class _Alloc>
2382 operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2386 template <class _CharT, class _Alloc>
2388 operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2389 return !(__y < __x);
2392 template <class _CharT, class _Alloc>
2394 operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2395 return !(__x < __y);
// Inequality for the char-pointer proxy, derived from == below.
2398 template <class _CharT, class _Alloc>
2399 inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2400 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2401 return !(__x == __y);
2404 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
// Two char-pointer proxies are equal iff they designate the same
// position in the same rope.
2406 template <class _CharT, class _Alloc>
2407 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2408 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2409 return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
// Stream insertion is only declared here; the definition lives in
// <stl/_rope.c>.  New-iostreams builds get the basic_ostream form,
// classic-iostreams builds the plain ostream form.
2412 #ifdef _STLP_USE_NEW_IOSTREAMS
2413 template<class _CharT, class _Traits, class _Alloc>
2414 basic_ostream<_CharT, _Traits>& operator<< (
2415 basic_ostream<_CharT, _Traits>& __o,
2416 const rope<_CharT, _Alloc>& __r);
2417 #elif ! defined (_STLP_USE_NO_IOSTREAMS)
2418 template<class _CharT, class _Alloc>
2419 ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
// Convenience typedefs: crope = rope of char, wrope = rope of
// wchar_t (the latter only when wide characters are supported).
2422 typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
2423 # ifdef _STLP_HAS_WCHAR_T
2424 typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
// Free-function access to a writable character proxy, mirroring the
// mutable_reference_at member for the two concrete rope types.
2427 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2429 return __c.mutable_reference_at(__i);
2432 # ifdef _STLP_HAS_WCHAR_T
2433 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2435 return __c.mutable_reference_at(__i);
2439 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// swap specializations forward to the member swap (O(1) pointer
// exchange).  With partial ordering a single template suffices;
// otherwise explicit crope/wrope overloads are provided.
2441 template <class _CharT, class _Alloc>
2442 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
2447 inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
2448 # ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
2449 inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
2452 #endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
2455 // Hash functions should probably be revisited later:
// Cheap O(1) hash: combines only the first character, the last
// character, and the length -- deliberately avoids walking the whole
// rope, at the cost of weak distribution (collisions for ropes that
// share endpoints and size).
2456 _STLP_TEMPLATE_NULL struct hash<crope>
2458 size_t operator()(const crope& __str) const
2460 size_t _p_size = __str.size();
2462 if (0 == _p_size) return 0;
2463 return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
2467 # ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
// Wide-character twin of hash<crope>: same first/last/size scheme.
2468 _STLP_TEMPLATE_NULL struct hash<wrope>
2470 size_t operator()(const wrope& __str) const
2472 size_t _p_size = __str.size();
2474 if (0 == _p_size) return 0;
2475 return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
2481 // I couldn't get this to work with VC++
// Rope-aware rotate: declared here, defined out of line.  Operates on
// mutable rope iterators.
2482 template<class _CharT,class _Alloc>
2484 _Rope_rotate(_Rope_iterator<_CharT,_Alloc> __first,
2485 _Rope_iterator<_CharT,_Alloc> __middle,
2486 _Rope_iterator<_CharT,_Alloc> __last);
2488 #if !defined(__GNUC__)
2489 // Appears to confuse g++
// Non-template overload of rotate for char ropes so the generic
// std::rotate is not chosen; forwards to _Rope_rotate.
2490 inline void rotate(_Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __first,
2491 _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __middle,
2492 _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __last) {
2493 _Rope_rotate(__first, __middle, __last);
// Read a character through the writable proxy.  When the cached value
// is marked valid the cached character is returned (that branch is
// elided in this listing -- confirm against the full header);
// otherwise the character is fetched from the rope tree.
2499 template <class _CharT, class _Alloc>
2500 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const
2502 if (_M_current_valid) {
2505 return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
2510 # if !defined (_STLP_LINK_TIME_INSTANTIATION)
2511 # include <stl/_rope.c>
2514 # endif /* _STLP_INTERNAL_ROPE_H */