1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1.2 +++ b/epoc32/include/tools/stlport/stl/_rope.h Wed Mar 31 12:33:34 2010 +0100
1.3 @@ -0,0 +1,2362 @@
1.4 +/*
1.5 + *
1.6 + * Copyright (c) 1996,1997
1.7 + * Silicon Graphics Computer Systems, Inc.
1.8 + *
1.9 + * Copyright (c) 1997
1.10 + * Moscow Center for SPARC Technology
1.11 + *
1.12 + * Copyright (c) 1999
1.13 + * Boris Fomitchev
1.14 + *
1.15 + * This material is provided "as is", with absolutely no warranty expressed
1.16 + * or implied. Any use is at your own risk.
1.17 + *
1.18 + * Permission to use or copy this software for any purpose is hereby granted
1.19 + * without fee, provided the above notices are retained on all copies.
1.20 + * Permission to modify the code and to distribute modified code is granted,
1.21 + * provided the above notices are retained, and a notice that the code was
1.22 + * modified is included with the above copyright notice.
1.23 + *
1.24 + */
1.25 +
1.26 +/* NOTE: This is an internal header file, included by other STL headers.
1.27 + * You should not attempt to use it directly.
1.28 + */
1.29 +
1.30 +// rope<_CharT,_Alloc> is a sequence of _CharT.
1.31 +// Ropes appear to be mutable, but update operations
1.32 +// really copy enough of the data structure to leave the original
1.33 +// valid. Thus ropes can be logically copied by just copying
1.34 +// a pointer value.
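A minimal usage sketch of this copy behavior, assuming the operator+ and substr
members declared further down in this header:

    rope<char> a("The quick brown fox");
    rope<char> b = a;               // O(1): b shares a's tree and bumps a refcount
    rope<char> c = a + " jumps";    // builds a concatenation node; a is untouched
    rope<char> d = c.substr(4, 5);  // "quick"; logically a copy, c is untouched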
1.35 +
1.36 +#ifndef _STLP_INTERNAL_ROPE_H
1.37 +#define _STLP_INTERNAL_ROPE_H
1.38 +
1.39 +#ifndef _STLP_INTERNAL_ALGOBASE_H
1.40 +# include <stl/_algobase.h>
1.41 +#endif
1.42 +
1.43 +#ifndef _STLP_IOSFWD
1.44 +# include <iosfwd>
1.45 +#endif
1.46 +
1.47 +#ifndef _STLP_INTERNAL_ALLOC_H
1.48 +# include <stl/_alloc.h>
1.49 +#endif
1.50 +
1.51 +#ifndef _STLP_INTERNAL_ITERATOR_H
1.52 +# include <stl/_iterator.h>
1.53 +#endif
1.54 +
1.55 +#ifndef _STLP_INTERNAL_ALGO_H
1.56 +# include <stl/_algo.h>
1.57 +#endif
1.58 +
1.59 +#ifndef _STLP_INTERNAL_FUNCTION_BASE_H
1.60 +# include <stl/_function_base.h>
1.61 +#endif
1.62 +
1.63 +#ifndef _STLP_INTERNAL_NUMERIC_H
1.64 +# include <stl/_numeric.h>
1.65 +#endif
1.66 +
1.67 +#ifndef _STLP_INTERNAL_HASH_FUN_H
1.68 +# include <stl/_hash_fun.h>
1.69 +#endif
1.70 +
1.71 +#ifndef _STLP_CHAR_TRAITS_H
1.72 +# include <stl/char_traits.h>
1.73 +#endif
1.74 +
1.75 +#ifndef _STLP_INTERNAL_THREADS_H
1.76 +# include <stl/_threads.h>
1.77 +#endif
1.78 +
1.79 +#ifdef _STLP_SGI_THREADS
1.80 +# include <mutex.h>
1.81 +#endif
1.82 +
1.83 +#ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
1.84 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
1.85 +#elif defined(__MRC__)||defined(__SC__)
1.86 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
1.87 +#else
1.88 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
1.89 +#endif
1.90 +
1.91 +_STLP_BEGIN_NAMESPACE
1.92 +
1.93 +// First a lot of forward declarations. The standard seems to require
1.94 +// much stricter "declaration before use" than many of the implementations
1.95 +// that preceded it.
1.96 +template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
1.97 +template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
1.98 +template<class _CharT, class _Alloc> struct _Rope_RopeRep;
1.99 +template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
1.100 +template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
1.101 +template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
1.102 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.103 +template<class _CharT, class _Alloc> class _Rope_const_iterator;
1.104 +template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
1.105 +template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
1.106 +
1.107 +_STLP_MOVE_TO_PRIV_NAMESPACE
1.108 +
1.109 +// Some helpers, so we can use the power algorithm on ropes.
1.110 +// See below for why this isn't local to the implementation.
1.111 +
1.112 +// This uses a nonstandard refcount convention.
1.113 +// The result has refcount 0.
1.114 +template<class _CharT, class _Alloc>
1.115 +struct _Rope_Concat_fn
1.116 + : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
1.117 + rope<_CharT,_Alloc> > {
1.118 + rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
1.119 + const rope<_CharT,_Alloc>& __y) {
1.120 + return __x + __y;
1.121 + }
1.122 +};
1.123 +
1.124 +template <class _CharT, class _Alloc>
1.125 +inline
1.126 +rope<_CharT,_Alloc>
1.127 +__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
1.128 +{ return rope<_CharT,_Alloc>(); }
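These helpers exist so that the repeated-character constructor further down can call
_STLP_PRIV __power(__base_rope, __exponent, _Concat_fn()): the base rope holds 32
copies of the character, __power doubles it O(log __exponent) times, and
__identity_element supplies the empty rope that the zero-exponent case of __power
requires. As a worked example, rope<char>(100, 'x') builds a 32-character leaf,
raises it to the third power with two concatenations, and then appends a
4-character remainder leaf.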
1.129 +
1.130 +_STLP_MOVE_TO_STD_NAMESPACE
1.131 +
1.132 +// Store an eos
1.133 +template <class _CharT>
1.134 +inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
1.135 +{ *__p = 0; }
1.136 +
1.137 +template <class _CharT>
1.138 +inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
1.139 +{ _STLP_STD::_Construct(__p); }
1.140 +
1.141 +template <class _CharT>
1.142 +inline void _S_construct_null(_CharT *__p) {
1.143 + typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
1.144 + _S_construct_null_aux(__p, _Char_Is_Integral());
1.145 +}
1.146 +
1.147 +// char_producers are logically functions that generate a section of
1.148 +// a string. These can be converted to ropes. The resulting rope
1.149 +// invokes the char_producer on demand. This allows, for example,
1.150 +// files to be viewed as ropes without reading the entire file.
1.151 +template <class _CharT>
1.152 +class char_producer {
1.153 +public:
1.154 + virtual ~char_producer() {}
1.155 + virtual void operator()(size_t __start_pos, size_t __len,
1.156 + _CharT* __buffer) = 0;
1.157 + // Buffer should really be an arbitrary output iterator.
1.158 + // That way we could flatten directly into an ostream, etc.
1.159 + // This is thoroughly impossible, since iterator types don't
1.160 + // have runtime descriptions.
1.161 +};
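A sketch of the intended use, with a hypothetical producer that serves characters out
of an already mapped file; the rope constructor taking a char_producer (declared
further down) stores the pointer and invokes operator() lazily, only for the pieces
of the rope that are actually examined:

    class mapped_file_producer : public char_producer<char> {
      const char* _M_base;   // start of the mapping (not owned)
    public:
      explicit mapped_file_producer(const char* __base) : _M_base(__base) {}
      virtual void operator()(size_t __start_pos, size_t __len, char* __buffer) {
        for (size_t __i = 0; __i != __len; ++__i)  // copy just the requested slice
          __buffer[__i] = _M_base[__start_pos + __i];
      }
    };

    // rope<char> file_rope(new mapped_file_producer(__mapping), __file_len, true);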
1.162 +
1.163 +// Sequence buffers:
1.164 +//
1.165 +// Sequence must provide an append operation that appends an
1.166 +// array to the sequence. Sequence buffers are useful only if
1.167 +// appending an entire array is cheaper than appending element by element.
1.168 +// This is true for many string representations.
1.169 +// This should perhaps inherit from ostream<sequence::value_type>
1.170 +// and be implemented correspondingly, so that sequence buffers can be used
1.171 +// for formatted output. For the sake of portability, we don't do this yet.
1.172 +//
1.173 +// For now, sequence buffers behave as output iterators. But they also
1.174 +// behave a little like basic_ostringstream<sequence::value_type> and a
1.175 +// little like containers.
1.176 +
1.177 +template<class _Sequence
1.178 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.179 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.180 + , size_t _Buf_sz = 100
1.181 +# if defined(__sgi) && !defined(__GNUC__)
1.182 +# define __TYPEDEF_WORKAROUND
1.183 + ,class _V = typename _Sequence::value_type
1.184 +# endif /* __sgi */
1.185 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.186 + >
1.187 +// The 3rd parameter works around a common compiler bug.
1.188 +class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
1.189 +public:
1.190 +# ifndef __TYPEDEF_WORKAROUND
1.191 + typedef typename _Sequence::value_type value_type;
1.192 + typedef sequence_buffer<_Sequence
1.193 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.194 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.195 + , _Buf_sz
1.196 + > _Self;
1.197 +# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.198 + > _Self;
1.199 + enum { _Buf_sz = 100};
1.200 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.201 + // # endif
1.202 +# else /* __TYPEDEF_WORKAROUND */
1.203 + typedef _V value_type;
1.204 + typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
1.205 +# endif /* __TYPEDEF_WORKAROUND */
1.206 +protected:
1.207 + _Sequence* _M_prefix;
1.208 + value_type _M_buffer[_Buf_sz];
1.209 + size_t _M_buf_count;
1.210 +public:
1.211 + void flush() {
1.212 + _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
1.213 + _M_buf_count = 0;
1.214 + }
1.215 + ~sequence_buffer() { flush(); }
1.216 + sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
1.217 + sequence_buffer(const _Self& __x) {
1.218 + _M_prefix = __x._M_prefix;
1.219 + _M_buf_count = __x._M_buf_count;
1.220 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.221 + }
1.222 + sequence_buffer(_Self& __x) {
1.223 + __x.flush();
1.224 + _M_prefix = __x._M_prefix;
1.225 + _M_buf_count = 0;
1.226 + }
1.227 + sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
1.228 + _Self& operator= (_Self& __x) {
1.229 + __x.flush();
1.230 + _M_prefix = __x._M_prefix;
1.231 + _M_buf_count = 0;
1.232 + return *this;
1.233 + }
1.234 + _Self& operator= (const _Self& __x) {
1.235 + _M_prefix = __x._M_prefix;
1.236 + _M_buf_count = __x._M_buf_count;
1.237 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.238 + return *this;
1.239 + }
1.240 + void push_back(value_type __x) {
1.241 + if (_M_buf_count < _Buf_sz) {
1.242 + _M_buffer[_M_buf_count] = __x;
1.243 + ++_M_buf_count;
1.244 + } else {
1.245 + flush();
1.246 + _M_buffer[0] = __x;
1.247 + _M_buf_count = 1;
1.248 + }
1.249 + }
1.250 + void append(const value_type *__s, size_t __len) {
1.251 + if (__len + _M_buf_count <= _Buf_sz) {
1.252 + size_t __i = _M_buf_count;
1.253 + size_t __j = 0;
1.254 + for (; __j < __len; __i++, __j++) {
1.255 + _M_buffer[__i] = __s[__j];
1.256 + }
1.257 + _M_buf_count += __len;
1.258 + } else if (0 == _M_buf_count) {
1.259 + _M_prefix->append(__s, __s + __len);
1.260 + } else {
1.261 + flush();
1.262 + append(__s, __len);
1.263 + }
1.264 + }
1.265 + _Self& write(const value_type *__s, size_t __len) {
1.266 + append(__s, __len);
1.267 + return *this;
1.268 + }
1.269 + _Self& put(value_type __x) {
1.270 + push_back(__x);
1.271 + return *this;
1.272 + }
1.273 + _Self& operator=(const value_type& __rhs) {
1.274 + push_back(__rhs);
1.275 + return *this;
1.276 + }
1.277 + _Self& operator*() { return *this; }
1.278 + _Self& operator++() { return *this; }
1.279 + _Self& operator++(int) { return *this; }
1.280 +};
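A minimal sketch of the buffering described above, assuming the append(first, last)
member that rope provides further down in this header (the same object also works as
an output iterator, e.g. as the destination of copy):

    rope<char> r;
    sequence_buffer<rope<char> > buf(r);
    const char* msg = "hello, rope";
    buf.write(msg, 7);           // "hello, " sits in the local buffer, r unchanged
    for (const char* p = msg + 7; *p; ++p)
      buf.put(*p);               // still buffered: well under the 100-element limit
    buf.flush();                 // a single append call pushes all 11 chars into r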
1.281 +
1.282 +// The following should be treated as private, at least for now.
1.283 +template<class _CharT>
1.284 +class _Rope_char_consumer {
1.285 +#if !defined (_STLP_MEMBER_TEMPLATES)
1.286 +public:
1.287 +  // Without member templates we have to use run-time parameterization.
1.288 + // The symmetry with char_producer is accidental and temporary.
1.289 + virtual ~_Rope_char_consumer() {}
1.290 + virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
1.291 +#endif
1.292 +};
1.293 +
1.294 +//
1.295 +// What follows should really be local to rope. Unfortunately,
1.296 +// that doesn't work, since it makes it impossible to define generic
1.297 +// equality on rope iterators. According to the draft standard, the
1.298 +// template parameters for such an equality operator cannot be inferred
1.299 +// from the occurrence of a member class as a parameter.
1.300 +// (SGI compilers in fact allow this, but the result wouldn't be
1.301 +// portable.)
1.302 +// Similarly, some of the static member functions are member functions
1.303 +// only to avoid polluting the global namespace, and to circumvent
1.304 +// restrictions on type inference for template functions.
1.305 +//
1.306 +
1.307 +//
1.308 +// The internal data structure for representing a rope. This is
1.309 +// private to the implementation. A rope is really just a pointer
1.310 +// to one of these.
1.311 +//
1.312 +// A few basic functions for manipulating this data structure
1.313 +// are members of _RopeRep. Most of the more complex algorithms
1.314 +// are implemented as rope members.
1.315 +//
1.316 +// Some of the static member functions of _RopeRep have identically
1.317 +// named functions in rope that simply invoke the _RopeRep versions.
1.318 +//
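The node kinds declared below map onto this structure as follows: a _Rope_RopeLeaf
holds a flat character array, a _Rope_RopeConcatenation holds two child _RopeRep
pointers, and _Rope_RopeFunction / _Rope_RopeSubstring wrap a char_producer that is
evaluated on demand. For the usage sketch near the top of this file, a plausible tree
for c == a + " jumps" (the exact shape depends on leaf sizes and rebalancing) is:

    _Rope_RopeConcatenation (size 25)
      left:  _Rope_RopeLeaf "The quick brown fox"   (also referenced by a and b)
      right: _Rope_RopeLeaf " jumps"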
1.319 +
1.320 +template<class _CharT, class _Alloc>
1.321 +struct _Rope_RopeRep
1.322 + : public _Refcount_Base
1.323 +{
1.324 + typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
1.325 +public:
1.326 + //
1.327 + // GAB: 11/09/05
1.328 + //
1.329 +  // "__ROPE_DEPTH_SIZE" is set to one more than "__ROPE_MAX_DEPTH".
1.330 +  // This was originally written as "__ROPE_MAX_DEPTH + 1", but that
1.331 +  // addition causes the SunPro compiler to complain about
1.332 +  // multiple declarations during the initialization of "_S_min_len".
1.333 +  // Changed to a fixed value, and the SunPro compiler appears to
1.334 +  // be happy.
1.335 + //
1.336 +# define __ROPE_MAX_DEPTH 45
1.337 +# define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
1.338 + enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
1.339 + enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
1.340 + // Apparently needed by VC++
1.341 + // The data fields of leaves are allocated with some
1.342 +  // extra space, to accommodate future growth and, for basic
1.343 + // character types, to hold a trailing eos character.
1.344 + enum { _S_alloc_granularity = 8 };
1.345 +
1.346 + _Tag _M_tag:8;
1.347 + bool _M_is_balanced:8;
1.348 +
1.349 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.350 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1.351 +
1.352 + allocator_type get_allocator() const { return allocator_type(_M_size); }
1.353 +
1.354 + unsigned char _M_depth;
1.355 + _CharT* _STLP_VOLATILE _M_c_string;
1.356 + _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
1.357 +
1.358 +# ifdef _STLP_NO_ARROW_OPERATOR
1.359 + _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
1.360 +# endif
1.361 +
1.362 + /* Flattened version of string, if needed. */
1.363 + /* typically 0. */
1.364 + /* If it's not 0, then the memory is owned */
1.365 + /* by this node. */
1.366 + /* In the case of a leaf, this may point to */
1.367 + /* the same memory as the data field. */
1.368 + _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
1.369 + allocator_type __a) :
1.370 + _Refcount_Base(1),
1.371 + _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
1.372 + { }
1.373 +
1.374 + typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
1.375 +# ifdef _STLP_HAS_WCHAR_T
1.376 + typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
1.377 +# else
1.378 + typedef __false_type _IsWCharT;
1.379 +# endif
1.380 +
1.381 + typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;
1.382 +
1.383 +#if 0
1.384 +  /* Please explain why this code is necessary if you uncomment it.
1.385 +   * The problem with it is that the rope implementation expects _S_rounded_up_size(n)
1.386 +   * to return a size > n in order to store the terminating null character. When the
1.387 +   * instantiation type is not char or wchar_t this is not guaranteed, resulting in a
1.388 +   * memory overrun.
1.389 + */
1.390 + static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
1.391 + // Allow slop for in-place expansion.
1.392 + return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
1.393 + }
1.394 +
1.395 + static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
1.396 + // Allow slop for in-place expansion.
1.397 + return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
1.398 + }
1.399 +#endif
1.400 + // fbp : moved from RopeLeaf
1.401 + static size_t _S_rounded_up_size(size_t __n)
1.402 + //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
1.403 + { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
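  // A worked example of the rounding with _S_alloc_granularity == 8:
  //   _S_rounded_up_size(1)  == (1  + 8) & ~7 ==  8
  //   _S_rounded_up_size(8)  == (8  + 8) & ~7 == 16
  //   _S_rounded_up_size(17) == (17 + 8) & ~7 == 24
  // i.e. the result is always strictly greater than __n, which is what leaves room
  // for the trailing eos character stored by _S_construct_null (see the discussion
  // in the commented-out _S_rounded_up_size_aux block above).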
1.404 +
1.405 + static void _S_free_string( _CharT* __s, size_t __len,
1.406 + allocator_type __a) {
1.407 + _STLP_STD::_Destroy_Range(__s, __s + __len);
1.408 + // This has to be a static member, so this gets a bit messy
1.409 +# ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
1.410 + __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
1.411 +# else
1.412 + __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
1.413 +# endif
1.414 + }
1.415 +
1.416 + // Deallocate data section of a leaf.
1.417 + // This shouldn't be a member function.
1.418 +  // But it's hard to do anything else at the
1.419 + // moment, because it's templatized w.r.t.
1.420 + // an allocator.
1.421 + // Does nothing if __GC is defined.
1.422 + void _M_free_c_string();
1.423 + void _M_free_tree();
1.424 + // Deallocate t. Assumes t is not 0.
1.425 + void _M_unref_nonnil() {
1.426 + if (_M_decr() == 0) _M_free_tree();
1.427 + }
1.428 + void _M_ref_nonnil() {
1.429 + _M_incr();
1.430 + }
1.431 + static void _S_unref(_Self* __t) {
1.432 + if (0 != __t) {
1.433 + __t->_M_unref_nonnil();
1.434 + }
1.435 + }
1.436 + static void _S_ref(_Self* __t) {
1.437 + if (0 != __t) __t->_M_incr();
1.438 + }
1.439 + //static void _S_free_if_unref(_Self* __t) {
1.440 + // if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
1.441 + //}
1.442 +};
1.443 +
1.444 +template<class _CharT, class _Alloc>
1.445 +struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
1.446 +public:
1.447 + _CharT* _M_data; /* Not necessarily 0 terminated. */
1.448 + /* The allocated size is */
1.449 + /* _S_rounded_up_size(size), except */
1.450 + /* in the GC case, in which it */
1.451 + /* doesn't matter. */
1.452 +private:
1.453 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.454 + typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
1.455 + void _M_init(__true_type const& /*_IsBasicCharType*/) {
1.456 + this->_M_c_string = _M_data;
1.457 + }
1.458 + void _M_init(__false_type const& /*_IsBasicCharType*/) {}
1.459 +
1.460 +public:
1.461 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.462 + typedef typename _RopeRep::allocator_type allocator_type;
1.463 +
1.464 + _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
1.465 + : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
1.466 + _M_data(__d) {
1.467 + _STLP_ASSERT(_p_size > 0)
1.468 + _M_init(_IsBasicCharType());
1.469 + }
1.470 +
1.471 +# ifdef _STLP_NO_ARROW_OPERATOR
1.472 + _Rope_RopeLeaf() {}
1.473 + _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
1.474 +# endif
1.475 +
1.476 +  // The constructor assumes that d has been allocated with
1.477 + // the proper allocator and the properly padded size.
1.478 + // In contrast, the destructor deallocates the data:
1.479 + ~_Rope_RopeLeaf() {
1.480 + if (_M_data != this->_M_c_string) {
1.481 + this->_M_free_c_string();
1.482 + }
1.483 + _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
1.484 + }
1.485 +};
1.486 +
1.487 +template<class _CharT, class _Alloc>
1.488 +struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
1.489 +private:
1.490 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.491 +
1.492 +public:
1.493 + _RopeRep* _M_left;
1.494 + _RopeRep* _M_right;
1.495 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.496 + typedef typename _RopeRep::allocator_type allocator_type;
1.497 + _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
1.498 + : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
1.499 + (max)(__l->_M_depth, __r->_M_depth) + 1, false,
1.500 + __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
1.501 + {}
1.502 +# ifdef _STLP_NO_ARROW_OPERATOR
1.503 + _Rope_RopeConcatenation() {}
1.504 + _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
1.505 +# endif
1.506 +
1.507 + ~_Rope_RopeConcatenation() {
1.508 + this->_M_free_c_string();
1.509 + _M_left->_M_unref_nonnil();
1.510 + _M_right->_M_unref_nonnil();
1.511 + }
1.512 +};
1.513 +
1.514 +template <class _CharT, class _Alloc>
1.515 +struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
1.516 +private:
1.517 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.518 +public:
1.519 + char_producer<_CharT>* _M_fn;
1.520 + /*
1.521 + * Char_producer is owned by the
1.522 + * rope and should be explicitly
1.523 + * deleted when the rope becomes
1.524 + * inaccessible.
1.525 + */
1.526 + bool _M_delete_when_done;
1.527 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.528 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.529 +# ifdef _STLP_NO_ARROW_OPERATOR
1.530 + _Rope_RopeFunction() {}
1.531 + _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
1.532 +# endif
1.533 +
1.534 + _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
1.535 + bool __d, allocator_type __a)
1.536 + : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
1.537 + , _M_delete_when_done(__d)
1.538 + { _STLP_ASSERT(_p_size > 0) }
1.539 +
1.540 + ~_Rope_RopeFunction() {
1.541 + this->_M_free_c_string();
1.542 + if (_M_delete_when_done) {
1.543 + delete _M_fn;
1.544 + }
1.545 + }
1.546 +};
1.547 +
1.548 +/*
1.549 + * Substring results are usually represented using just
1.550 + * concatenation nodes. But in the case of very long flat ropes
1.551 + * or ropes with a functional representation that isn't practical.
1.552 + * In that case, we represent the result as a special case of
1.553 + * RopeFunction, whose char_producer points back to the rope itself.
1.554 + * In all cases except repeated substring operations and
1.555 + * deallocation, we treat the result as a RopeFunction.
1.556 + */
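A sketch of when such a node appears, assuming the substr member declared further
down in this header and reusing the hypothetical mapped_file_producer from the
char_producer sketch above (short substrings may instead be copied eagerly into a
leaf):

    rope<char> lazy(new mapped_file_producer(__mapping), __file_len, true);
    rope<char> chunk = lazy.substr(1000000, 4096);
    // chunk's tree is a single _Rope_RopeSubstring: the node is its own
    // char_producer, _M_base points at lazy's _Rope_RopeFunction, _M_start is
    // 1000000, and nothing is generated until characters of chunk are read.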
1.557 +template<class _CharT, class _Alloc>
1.558 +struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
1.559 +public:
1.560 + // XXX this whole class should be rewritten.
1.561 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.562 + _RopeRep *_M_base; // not 0
1.563 + size_t _M_start;
1.564 + /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
1.565 + _CharT* __buffer) {
1.566 + typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1.567 + typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1.568 + switch (_M_base->_M_tag) {
1.569 + case _RopeRep::_S_function:
1.570 + case _RopeRep::_S_substringfn:
1.571 + {
1.572 + char_producer<_CharT>* __fn =
1.573 + __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
1.574 + _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
1.575 + _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
1.576 + (*__fn)(__start_pos + _M_start, __req_len, __buffer);
1.577 + }
1.578 + break;
1.579 + case _RopeRep::_S_leaf:
1.580 + {
1.581 + _CharT* __s =
1.582 + __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
1.583 + _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
1.584 + }
1.585 + break;
1.586 + default:
1.587 + _STLP_ASSERT(false)
1.588 + ;
1.589 + }
1.590 + }
1.591 +
1.592 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.593 + typedef typename _RopeRep::allocator_type allocator_type;
1.594 +
1.595 + _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
1.596 + : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
1.597 + _M_base(__b), _M_start(__s) {
1.598 + _STLP_ASSERT(__l > 0)
1.599 + _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
1.600 + _M_base->_M_ref_nonnil();
1.601 + this->_M_tag = _RopeRep::_S_substringfn;
1.602 + }
1.603 + virtual ~_Rope_RopeSubstring()
1.604 + { _M_base->_M_unref_nonnil(); }
1.605 +};
1.606 +
1.607 +/*
1.608 + * Self-destructing pointers to Rope_rep.
1.609 + * These are not conventional smart pointers. Their
1.610 + * only purpose in life is to ensure that unref is called
1.611 + * on the pointer either at normal exit or if an exception
1.612 + * is raised. It is the caller's responsibility to
1.613 + * adjust reference counts when these pointers are initialized
1.614 + * or assigned to. (This convention significantly reduces
1.615 + * the number of potentially expensive reference count
1.616 + * updates.)
1.617 + */
1.618 +template<class _CharT, class _Alloc>
1.619 +struct _Rope_self_destruct_ptr {
1.620 + _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
1.621 + ~_Rope_self_destruct_ptr()
1.622 + { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
1.623 +# ifdef _STLP_USE_EXCEPTIONS
1.624 + _Rope_self_destruct_ptr() : _M_ptr(0) {}
1.625 +# else
1.626 + _Rope_self_destruct_ptr() {}
1.627 +# endif
1.628 + _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
1.629 + _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
1.630 + _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
1.631 + operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
1.632 + _Rope_self_destruct_ptr<_CharT, _Alloc>&
1.633 + operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
1.634 + { _M_ptr = __x; return *this; }
1.635 +};
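A minimal sketch of the intended pattern (the real uses are in the rope members
defined in the companion _rope.c), for some _Rope_RopeRep<char, allocator<char> >*
named __rep whose reference the surrounding code already owns:

    {
      _Rope_self_destruct_ptr<char, allocator<char> > __guard(__rep);
      // ... work that may throw; whether control leaves this block normally or by
      // exception, ~_Rope_self_destruct_ptr calls _S_unref(__rep), so the owned
      // reference is never leaked ...
    }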
1.636 +
1.637 +/*
1.638 + * Dereferencing a nonconst iterator has to return something
1.639 + * that behaves almost like a reference. It's not possible to
1.640 + * return an actual reference since assignment requires extra
1.641 + * work. And we would get into the same problems as with the
1.642 + * CD2 version of basic_string.
1.643 + */
1.644 +template<class _CharT, class _Alloc>
1.645 +class _Rope_char_ref_proxy {
1.646 + typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
1.647 + friend class rope<_CharT,_Alloc>;
1.648 + friend class _Rope_iterator<_CharT,_Alloc>;
1.649 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.650 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.651 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.652 + typedef rope<_CharT,_Alloc> _My_rope;
1.653 + size_t _M_pos;
1.654 + _CharT _M_current;
1.655 + bool _M_current_valid;
1.656 + _My_rope* _M_root; // The whole rope.
1.657 +public:
1.658 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
1.659 + _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
1.660 + _Rope_char_ref_proxy(const _Self& __x) :
1.661 + _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
1.662 + // Don't preserve cache if the reference can outlive the
1.663 + // expression. We claim that's not possible without calling
1.664 +  // a copy constructor or generating a reference to a proxy
1.665 + // reference. We declare the latter to have undefined semantics.
1.666 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
1.667 + : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
1.668 + inline operator _CharT () const;
1.669 + _Self& operator= (_CharT __c);
1.670 + _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
1.671 + _Self& operator= (const _Self& __c) {
1.672 + return operator=((_CharT)__c);
1.673 + }
1.674 +};
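A sketch of what the proxy buys, assuming the mutable_begin() accessor declared
further down in this header:

    rope<char> r("abc");
    rope<char> snapshot = r;    // shares r's tree
    *r.mutable_begin() = 'X';   // assignment runs through _Rope_char_ref_proxy,
                                // copying only what the update requires:
                                // r now reads "Xbc", snapshot still reads "abc"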
1.675 +
1.676 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
1.677 +template<class _CharT, class __Alloc>
1.678 +inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
1.679 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
1.680 + _CharT __tmp = __a;
1.681 + __a = __b;
1.682 + __b = __tmp;
1.683 +}
1.684 +#else
1.685 +// There is no really acceptable way to handle this. The default
1.686 +// definition of swap doesn't work for proxy references.
1.687 +// It can't really be made to work, even with ugly hacks, since
1.688 +// the only unusual operation it uses is the copy constructor, which
1.689 +// is needed for other purposes. We provide a macro for
1.690 +// full specializations, and instantiate the most common case.
1.691 +# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
1.692 + inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
1.693 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
1.694 + _CharT __tmp = __a; \
1.695 + __a = __b; \
1.696 + __b = __tmp; \
1.697 + }
1.698 +
1.699 +_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
1.700 +
1.701 +# ifndef _STLP_NO_WCHAR_T
1.702 +_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
1.703 +# endif
1.704 +
1.705 +#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
1.706 +
1.707 +template<class _CharT, class _Alloc>
1.708 +class _Rope_char_ptr_proxy {
1.709 + // XXX this class should be rewritten.
1.710 +public:
1.711 + typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
1.712 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.713 + size_t _M_pos;
1.714 + rope<_CharT,_Alloc>* _M_root; // The whole rope.
1.715 +
1.716 + _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
1.717 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.718 + _Rope_char_ptr_proxy(const _Self& __x)
1.719 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.720 + _Rope_char_ptr_proxy() {}
1.721 + _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
1.722 + _STLP_ASSERT(0 == __x)
1.723 + }
1.724 + _Self& operator= (const _Self& __x) {
1.725 + _M_pos = __x._M_pos;
1.726 + _M_root = __x._M_root;
1.727 + return *this;
1.728 + }
1.729 +
1.730 + _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
1.731 + return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
1.732 + }
1.733 +};
1.734 +
1.735 +
1.736 +/*
1.737 + * Rope iterators:
1.738 + * Unlike in the C version, we cache only part of the stack
1.739 + * for rope iterators, since they must be efficiently copyable.
1.740 + * When we run out of cache, we have to reconstruct the iterator
1.741 + * value.
1.742 + * Pointers from iterators are not included in reference counts.
1.743 + * Iterators are assumed to be thread private. Ropes can
1.744 + * be shared.
1.745 + */
1.746 +template<class _CharT, class _Alloc>
1.747 +class _Rope_iterator_base
1.748 +/* : public random_access_iterator<_CharT, ptrdiff_t> */
1.749 +{
1.750 + friend class rope<_CharT,_Alloc>;
1.751 + typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
1.752 + typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
1.753 +public:
1.754 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.755 +
1.756 +  enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_directions.
1.757 + enum { _S_iterator_buf_len = 15 };
1.758 + size_t _M_current_pos;
1.759 + // The whole rope.
1.760 + _RopeRep* _M_root;
1.761 + // Starting position for current leaf
1.762 + size_t _M_leaf_pos;
1.763 + // Buffer possibly containing current char.
1.764 + _CharT* _M_buf_start;
1.765 + // Pointer to current char in buffer, != 0 ==> buffer valid.
1.766 + _CharT* _M_buf_ptr;
1.767 +  // One past the last valid char in buffer.
1.768 + _CharT* _M_buf_end;
1.769 +
1.770 + // What follows is the path cache. We go out of our
1.771 + // way to make this compact.
1.772 + // Path_end contains the bottom section of the path from
1.773 + // the root to the current leaf.
1.774 + struct {
1.775 +# if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
1.776 + _RopeRep const*_M_data[4];
1.777 +# else
1.778 + _RopeRep const*_M_data[_S_path_cache_len];
1.779 +# endif
1.780 + } _M_path_end;
1.781 +  // Last valid position in path_end;
1.782 + // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
1.783 + // point to concatenation nodes.
1.784 + int _M_leaf_index;
1.785 + // (_M_path_directions >> __i) & 1 is 1
1.786 + // if we got from _M_path_end[leaf_index - __i - 1]
1.787 + // to _M_path_end[leaf_index - __i] by going to the
1.788 +  // right. Assumes path_cache_len <= 9.
1.789 + unsigned char _M_path_directions;
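  // A worked example of this encoding: with _M_leaf_index == 2 and a cached path
  // whose recorded steps were "go right" (from _M_path_end._M_data[0] to [1]) and
  // then "go left" (from [1] to [2]), bit 0 is 0 and bit 1 is 1, so the value is 2.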
1.790 + // Short buffer for surrounding chars.
1.791 + // This is useful primarily for
1.792 + // RopeFunctions. We put the buffer
1.793 + // here to avoid locking in the
1.794 + // multithreaded case.
1.795 + // The cached path is generally assumed to be valid
1.796 + // only if the buffer is valid.
1.797 + struct {
1.798 +# if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
1.799 + _CharT _M_data[15];
1.800 +# else
1.801 + _CharT _M_data[_S_iterator_buf_len];
1.802 +# endif
1.803 + } _M_tmp_buf;
1.804 +
1.805 + // Set buffer contents given path cache.
1.806 + static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.807 + // Set buffer contents and path cache.
1.808 + static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.809 + // As above, but assumes path cache is valid for previous posn.
1.810 + static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.811 + _Rope_iterator_base() {}
1.812 + _Rope_iterator_base(_RopeRep* __root, size_t __pos)
1.813 + : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
1.814 + void _M_incr(size_t __n);
1.815 + void _M_decr(size_t __n);
1.816 +public:
1.817 + size_t index() const { return _M_current_pos; }
1.818 + void _M_copy_buf(const _Self& __x) {
1.819 + _M_tmp_buf = __x._M_tmp_buf;
1.820 + if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
1.821 + _M_buf_start = _M_tmp_buf._M_data;
1.822 + _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
1.823 + _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
1.824 + }
1.825 + }
1.826 +
1.827 + _Rope_iterator_base(const _Self& __x)
1.828 + : _M_current_pos(__x._M_current_pos), _M_root(__x._M_root),
1.829 + _M_buf_start(__x._M_buf_start), _M_buf_ptr(__x._M_buf_ptr), _M_path_end(__x._M_path_end),
1.830 + _M_leaf_index(__x._M_leaf_index), _M_path_directions(__x._M_path_directions) {
1.831 + if (0 != __x._M_buf_ptr) {
1.832 + _M_copy_buf(__x);
1.833 + }
1.834 + }
1.835 + _Self& operator = (const _Self& __x) {
1.836 + _M_current_pos = __x._M_current_pos;
1.837 + _M_root = __x._M_root;
1.838 + _M_buf_start = __x._M_buf_start;
1.839 + _M_buf_ptr = __x._M_buf_ptr;
1.840 + _M_path_end = __x._M_path_end;
1.841 + _M_leaf_index = __x._M_leaf_index;
1.842 + _M_path_directions = __x._M_path_directions;
1.843 + if (0 != __x._M_buf_ptr) {
1.844 + _M_copy_buf(__x);
1.845 + }
1.846 + return *this;
1.847 + }
1.848 +};
1.849 +
1.850 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.851 +
1.852 +template<class _CharT, class _Alloc>
1.853 +class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.854 + friend class rope<_CharT,_Alloc>;
1.855 + typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
1.856 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.857 + // protected:
1.858 +public:
1.859 +# ifndef _STLP_HAS_NO_NAMESPACES
1.860 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.861 + // The one from the base class may not be directly visible.
1.862 +# endif
1.863 + _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
1.864 + _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
1.865 + // Only nonconst iterators modify root ref count
1.866 + {}
1.867 +public:
1.868 + typedef _CharT reference; // Really a value. Returning a reference
1.869 + // Would be a mess, since it would have
1.870 + // to be included in refcount.
1.871 + typedef const _CharT* pointer;
1.872 + typedef _CharT value_type;
1.873 + typedef ptrdiff_t difference_type;
1.874 + typedef random_access_iterator_tag iterator_category;
1.875 +
1.876 +public:
1.877 + _Rope_const_iterator() {}
1.878 + _Rope_const_iterator(const _Self& __x) :
1.879 + _Rope_iterator_base<_CharT,_Alloc>(__x) { }
1.880 + _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
1.881 + _Rope_iterator_base<_CharT,_Alloc>(__x) {}
1.882 + _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
1.883 + _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
1.884 + _Self& operator= (const _Self& __x) {
1.885 + _Base::operator=(__x);
1.886 + return *this;
1.887 + }
1.888 + reference operator*() {
1.889 + if (0 == this->_M_buf_ptr)
1.890 +#if !defined (__DMC__)
1.891 + _S_setcache(*this);
1.892 +#else
1.893 + { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
1.894 +#endif
1.895 + return *(this->_M_buf_ptr);
1.896 + }
1.897 + _Self& operator++() {
1.898 + _CharT* __next;
1.899 + if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
1.900 + this->_M_buf_ptr = __next;
1.901 + ++this->_M_current_pos;
1.902 + } else {
1.903 + this->_M_incr(1);
1.904 + }
1.905 + return *this;
1.906 + }
1.907 + _Self& operator+=(ptrdiff_t __n) {
1.908 + if (__n >= 0) {
1.909 + this->_M_incr(__n);
1.910 + } else {
1.911 + this->_M_decr(-__n);
1.912 + }
1.913 + return *this;
1.914 + }
1.915 + _Self& operator--() {
1.916 + this->_M_decr(1);
1.917 + return *this;
1.918 + }
1.919 + _Self& operator-=(ptrdiff_t __n) {
1.920 + if (__n >= 0) {
1.921 + this->_M_decr(__n);
1.922 + } else {
1.923 + this->_M_incr(-__n);
1.924 + }
1.925 + return *this;
1.926 + }
1.927 + _Self operator++(int) {
1.928 + size_t __old_pos = this->_M_current_pos;
1.929 + this->_M_incr(1);
1.930 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.931 + // This makes a subsequent dereference expensive.
1.932 + // Perhaps we should instead copy the iterator
1.933 + // if it has a valid cache?
1.934 + }
1.935 + _Self operator--(int) {
1.936 + size_t __old_pos = this->_M_current_pos;
1.937 + this->_M_decr(1);
1.938 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.939 + }
1.940 + inline reference operator[](size_t __n);
1.941 +};
1.942 +
1.943 +template<class _CharT, class _Alloc>
1.944 +class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.945 + friend class rope<_CharT,_Alloc>;
1.946 + typedef _Rope_iterator<_CharT, _Alloc> _Self;
1.947 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.948 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.949 +
1.950 +public:
1.951 + rope<_CharT,_Alloc>* _M_root_rope;
1.952 + // root is treated as a cached version of this,
1.953 + // and is used to detect changes to the underlying
1.954 + // rope.
1.955 + // Root is included in the reference count.
1.956 + // This is necessary so that we can detect changes reliably.
1.957 + // Unfortunately, it requires careful bookkeeping for the
1.958 + // nonGC case.
1.959 + _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
1.960 +
1.961 + void _M_check();
1.962 +public:
1.963 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.964 + typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1.965 + typedef _CharT value_type;
1.966 + typedef ptrdiff_t difference_type;
1.967 + typedef random_access_iterator_tag iterator_category;
1.968 +public:
1.969 + ~_Rope_iterator() { //*TY 5/6/00 - added dtor to balance reference count
1.970 + _RopeRep::_S_unref(this->_M_root);
1.971 + }
1.972 +
1.973 + rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1.974 + _Rope_iterator() {
1.975 + this->_M_root = 0; // Needed for reference counting.
1.976 + }
1.977 + _Rope_iterator(const _Self& __x) :
1.978 + _Rope_iterator_base<_CharT,_Alloc>(__x) {
1.979 + _M_root_rope = __x._M_root_rope;
1.980 + _RopeRep::_S_ref(this->_M_root);
1.981 + }
1.982 + _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1.983 + _Self& operator= (const _Self& __x) {
1.984 + _RopeRep* __old = this->_M_root;
1.985 + _RopeRep::_S_ref(__x._M_root);
1.986 + _Base::operator=(__x);
1.987 + _M_root_rope = __x._M_root_rope;
1.988 + _RopeRep::_S_unref(__old);
1.989 + return *this;
1.990 + }
1.991 + reference operator*() {
1.992 + _M_check();
1.993 + if (0 == this->_M_buf_ptr) {
1.994 + return reference(_M_root_rope, this->_M_current_pos);
1.995 + } else {
1.996 + return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
1.997 + }
1.998 + }
1.999 + _Self& operator++() {
1.1000 + this->_M_incr(1);
1.1001 + return *this;
1.1002 + }
1.1003 + _Self& operator+=(ptrdiff_t __n) {
1.1004 + if (__n >= 0) {
1.1005 + this->_M_incr(__n);
1.1006 + } else {
1.1007 + this->_M_decr(-__n);
1.1008 + }
1.1009 + return *this;
1.1010 + }
1.1011 + _Self& operator--() {
1.1012 + this->_M_decr(1);
1.1013 + return *this;
1.1014 + }
1.1015 + _Self& operator-=(ptrdiff_t __n) {
1.1016 + if (__n >= 0) {
1.1017 + this->_M_decr(__n);
1.1018 + } else {
1.1019 + this->_M_incr(-__n);
1.1020 + }
1.1021 + return *this;
1.1022 + }
1.1023 + _Self operator++(int) {
1.1024 + size_t __old_pos = this->_M_current_pos;
1.1025 + this->_M_incr(1);
1.1026 + return _Self(_M_root_rope, __old_pos);
1.1027 + }
1.1028 + _Self operator--(int) {
1.1029 + size_t __old_pos = this->_M_current_pos;
1.1030 + this->_M_decr(1);
1.1031 + return _Self(_M_root_rope, __old_pos);
1.1032 + }
1.1033 + reference operator[](ptrdiff_t __n) {
1.1034 + return reference(_M_root_rope, this->_M_current_pos + __n);
1.1035 + }
1.1036 +};
1.1037 +
1.1038 +# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1.1039 +template <class _CharT, class _Alloc>
1.1040 +inline random_access_iterator_tag
1.1041 +iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
1.1042 +template <class _CharT, class _Alloc>
1.1043 +inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1044 +template <class _CharT, class _Alloc>
1.1045 +inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1046 +template <class _CharT, class _Alloc>
1.1047 +inline random_access_iterator_tag
1.1048 +iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
1.1049 +template <class _CharT, class _Alloc>
1.1050 +inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1051 +template <class _CharT, class _Alloc>
1.1052 +inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1053 +#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
1.1054 +
1.1055 +template <class _CharT, class _Alloc, class _CharConsumer>
1.1056 +bool _S_apply_to_pieces(_CharConsumer& __c,
1.1057 + _Rope_RopeRep<_CharT, _Alloc> *__r,
1.1058 + size_t __begin, size_t __end);
1.1059 + // begin and end are assumed to be in range.
1.1060 +
1.1061 +template <class _CharT, class _Alloc>
1.1062 +class rope
1.1063 +#if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
1.1064 + : public __stlport_class<rope<_CharT, _Alloc> >
1.1065 +#endif
1.1066 +{
1.1067 + typedef rope<_CharT,_Alloc> _Self;
1.1068 +public:
1.1069 + typedef _CharT value_type;
1.1070 + typedef ptrdiff_t difference_type;
1.1071 + typedef size_t size_type;
1.1072 + typedef _CharT const_reference;
1.1073 + typedef const _CharT* const_pointer;
1.1074 + typedef _Rope_iterator<_CharT,_Alloc> iterator;
1.1075 + typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1.1076 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.1077 + typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1.1078 +
1.1079 + friend class _Rope_iterator<_CharT,_Alloc>;
1.1080 + friend class _Rope_const_iterator<_CharT,_Alloc>;
1.1081 + friend struct _Rope_RopeRep<_CharT,_Alloc>;
1.1082 + friend class _Rope_iterator_base<_CharT,_Alloc>;
1.1083 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.1084 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.1085 + friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1.1086 +
1.1087 + _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1.1088 +
1.1089 +protected:
1.1090 + typedef _CharT* _Cstrptr;
1.1091 +
1.1092 + static _CharT _S_empty_c_str[1];
1.1093 +
1.1094 + enum { _S_copy_max = 23 };
1.1095 + // For strings shorter than _S_copy_max, we copy to
1.1096 + // concatenate.
1.1097 +
1.1098 + typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1.1099 + typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
1.1100 +
1.1101 +public:
1.1102 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.1103 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1.1104 +
1.1105 +public:
1.1106 + // The only data member of a rope:
1.1107 + _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
1.1108 +
1.1109 +public:
1.1110 + allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
1.1111 +
1.1112 +public:
1.1113 + typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1.1114 + typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1.1115 + typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1.1116 + typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1.1117 +
1.1118 + // Retrieve a character at the indicated position.
1.1119 + static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1.1120 +
1.1121 + // Obtain a pointer to the character at the indicated position.
1.1122 + // The pointer can be used to change the character.
1.1123 + // If such a pointer cannot be produced, as is frequently the
1.1124 + // case, 0 is returned instead.
1.1125 + // (Returns nonzero only if all nodes in the path have a refcount
1.1126 + // of 1.)
1.1127 + static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1.1128 +
1.1129 + static void _S_unref(_RopeRep* __t) {
1.1130 + _RopeRep::_S_unref(__t);
1.1131 + }
1.1132 + static void _S_ref(_RopeRep* __t) {
1.1133 + _RopeRep::_S_ref(__t);
1.1134 + }
1.1135 +
1.1136 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.1137 +
1.1138 +  // Result is counted in refcount.
1.1139 + static _RopeRep* _S_substring(_RopeRep* __base,
1.1140 + size_t __start, size_t __endp1);
1.1141 +
1.1142 + static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1.1143 + const _CharT* __iter, size_t __slen);
1.1144 + // Concatenate rope and char ptr, copying __s.
1.1145 + // Should really take an arbitrary iterator.
1.1146 + // Result is counted in refcount.
1.1147 + static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1.1148 + const _CharT* __iter, size_t __slen);
1.1149 + // As above, but one reference to __r is about to be
1.1150 + // destroyed. Thus the pieces may be recycled if all
1.1151 +  // relevant reference counts are 1.
1.1152 +
1.1153 +  // General concatenation on _RopeRep. Result
1.1154 + // has refcount of 1. Adjusts argument refcounts.
1.1155 + static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
1.1156 +
1.1157 +public:
1.1158 +#if defined (_STLP_MEMBER_TEMPLATES)
1.1159 + template <class _CharConsumer>
1.1160 +#else
1.1161 + typedef _Rope_char_consumer<_CharT> _CharConsumer;
1.1162 +#endif
1.1163 + void apply_to_pieces(size_t __begin, size_t __end,
1.1164 + _CharConsumer& __c) const
1.1165 + { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
1.1166 +
1.1167 +protected:
1.1168 +
1.1169 + static size_t _S_rounded_up_size(size_t __n)
1.1170 + { return _RopeRep::_S_rounded_up_size(__n); }
1.1171 +
1.1172 +  // Allocate and construct a RopeLeaf using the supplied allocator.
1.1173 + // Takes ownership of s instead of copying.
1.1174 + static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
1.1175 + size_t _p_size, allocator_type __a) {
1.1176 + _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1.1177 + _RopeLeaf).allocate(1);
1.1178 + _STLP_TRY {
1.1179 + _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1.1180 + }
1.1181 + _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1182 + _RopeLeaf).deallocate(__space, 1))
1.1183 + return __space;
1.1184 + }
1.1185 +
1.1186 + static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
1.1187 + allocator_type __a) {
1.1188 + _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1.1189 + _RopeConcatenation).allocate(1);
1.1190 + return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
1.1191 + }
1.1192 +
1.1193 + static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1.1194 + size_t _p_size, bool __d, allocator_type __a) {
1.1195 + _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1.1196 + _RopeFunction).allocate(1);
1.1197 + return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
1.1198 + }
1.1199 +
1.1200 + static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1.1201 + size_t __l, allocator_type __a) {
1.1202 + _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
1.1203 + _RopeSubstring).allocate(1);
1.1204 + return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
1.1205 + }
1.1206 +
1.1207 + static
1.1208 + _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1.1209 + size_t _p_size, allocator_type __a) {
1.1210 + if (0 == _p_size) return 0;
1.1211 +
1.1212 + _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1.1213 +
1.1214 + _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
1.1215 + _S_construct_null(__buf + _p_size);
1.1216 +
1.1217 + _STLP_TRY {
1.1218 + return _S_new_RopeLeaf(__buf, _p_size, __a);
1.1219 + }
1.1220 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1.1221 + _STLP_RET_AFTER_THROW(0)
1.1222 + }
1.1223 +
1.1224 +
1.1225 + // Concatenation of nonempty strings.
1.1226 + // Always builds a concatenation node.
1.1227 + // Rebalances if the result is too deep.
1.1228 + // Result has refcount 1.
1.1229 + // Does not increment left and right ref counts even though
1.1230 + // they are referenced.
1.1231 + static _RopeRep*
1.1232 + _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1.1233 +
1.1234 + // Concatenation helper functions
1.1235 + static _RopeLeaf*
1.1236 + _S_leaf_concat_char_iter(_RopeLeaf* __r,
1.1237 + const _CharT* __iter, size_t __slen);
1.1238 + // Concatenate by copying leaf.
1.1239 +  // Should take an arbitrary iterator.
1.1240 +  // Result has refcount 1.
1.1241 + static _RopeLeaf* _S_destr_leaf_concat_char_iter
1.1242 + (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1.1243 + // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1.1244 +
1.1245 +
1.1246 + // A helper function for exponentiating strings.
1.1247 + // This uses a nonstandard refcount convention.
1.1248 + // The result has refcount 0.
1.1249 + typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1250 +#if !defined (__GNUC__) || (__GNUC__ < 3)
1.1251 + friend _Concat_fn;
1.1252 +#else
1.1253 + friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
1.1254 +#endif
1.1255 +
1.1256 +public:
1.1257 + static size_t _S_char_ptr_len(const _CharT* __s) {
1.1258 + return char_traits<_CharT>::length(__s);
1.1259 + }
1.1260 +
1.1261 +public: /* for operators */
1.1262 + rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1.1263 + : _M_tree_ptr(__a, __t) { }
1.1264 +private:
1.1265 + // Copy __r to the _CharT buffer.
1.1266 + // Returns __buffer + __r->_M_size._M_data.
1.1267 + // Assumes that buffer is uninitialized.
1.1268 + static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1.1269 +
1.1270 + // Again, with explicit starting position and length.
1.1271 + // Assumes that buffer is uninitialized.
1.1272 + static _CharT* _S_flatten(_RopeRep* __r,
1.1273 + size_t __start, size_t __len,
1.1274 + _CharT* __buffer);
1.1275 +
1.1276 + // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
1.1277 +public:
1.1278 + static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
1.1279 +protected:
1.1280 + static bool _S_is_balanced(_RopeRep* __r)
1.1281 + { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1.1282 +
1.1283 + static bool _S_is_almost_balanced(_RopeRep* __r) {
1.1284 + return (__r->_M_depth == 0 ||
1.1285 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
1.1286 + }
1.1287 +
1.1288 + static bool _S_is_roughly_balanced(_RopeRep* __r) {
1.1289 + return (__r->_M_depth <= 1 ||
1.1290 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
1.1291 + }
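  // For orientation: _S_min_len[__d] (defined in the companion _rope.c) is the
  // smallest length a depth-__d tree may have and still be considered balanced;
  // the table grows like the Fibonacci sequence, which is what keeps the depth of
  // a balanced rope logarithmic in its length.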
1.1292 +
1.1293 + // Assumes the result is not empty.
1.1294 + static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1.1295 + _RopeRep* __right) {
1.1296 + _RopeRep* __result = _S_concat_rep(__left, __right);
1.1297 + if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1.1298 + return __result;
1.1299 + }
1.1300 +
1.1301 + // The basic rebalancing operation. Logically copies the
1.1302 + // rope. The result has refcount of 1. The client will
1.1303 + // usually decrement the reference count of __r.
1.1304 + // The result is within height 2 of balanced by the above
1.1305 + // definition.
1.1306 + static _RopeRep* _S_balance(_RopeRep* __r);
1.1307 +
1.1308 +  // Add all unbalanced subtrees to the forest of balanced trees.
1.1309 + // Used only by balance.
1.1310 + static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1.1311 +
1.1312 + // Add __r to forest, assuming __r is already balanced.
1.1313 + static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1.1314 +
1.1315 +#ifdef _STLP_DEBUG
1.1316 + // Print to stdout, exposing structure
1.1317 + static void _S_dump(_RopeRep* __r, int __indent = 0);
1.1318 +#endif
1.1319 +
1.1320 + // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1.1321 + static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1.1322 +
1.1323 + void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
1.1324 +
1.1325 + void _M_reset(_RopeRep* __r) {
1.1326 + //if (__r != _M_tree_ptr._M_data) {
1.1327 + _S_unref(_M_tree_ptr._M_data);
1.1328 + _M_tree_ptr._M_data = __r;
1.1329 + //}
1.1330 + }
1.1331 +
1.1332 +public:
1.1333 + bool empty() const { return 0 == _M_tree_ptr._M_data; }
1.1334 +
1.1335 + // Comparison member function. This is public only for those
1.1336 + // clients that need a ternary comparison. Others
1.1337 + // should use the comparison operators below.
1.1338 + int compare(const _Self& __y) const {
1.1339 + return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1.1340 + }
1.1341 +
1.1342 + rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1.1343 + : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
1.1344 + {}
1.1345 +
1.1346 + rope(const _CharT* __s, size_t __len,
1.1347 + const allocator_type& __a = allocator_type())
1.1348 + : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
1.1349 + {}
1.1350 +
1.1351 + // Should perhaps be templatized with respect to the iterator type
1.1352 + // and use Sequence_buffer. (It should perhaps use sequence_buffer
1.1353 + // even now.)
1.1354 + rope(const _CharT *__s, const _CharT *__e,
1.1355 + const allocator_type& __a = allocator_type())
1.1356 + : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
1.1357 + {}
1.1358 +
1.1359 + rope(const const_iterator& __s, const const_iterator& __e,
1.1360 + const allocator_type& __a = allocator_type())
1.1361 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1362 + __e._M_current_pos))
1.1363 + {}
1.1364 +
1.1365 + rope(const iterator& __s, const iterator& __e,
1.1366 + const allocator_type& __a = allocator_type())
1.1367 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1368 + __e._M_current_pos))
1.1369 + {}
1.1370 +
1.1371 + rope(_CharT __c, const allocator_type& __a = allocator_type())
1.1372 + : _M_tree_ptr(__a, (_RopeRep*)0) {
1.1373 + _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1.1374 +
1.1375 + _Copy_Construct(__buf, __c);
1.1376 + _S_construct_null(__buf + 1);
1.1377 +
1.1378 + _STLP_TRY {
1.1379 + _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1.1380 + }
1.1381 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
1.1382 + }
1.1383 +
1.1384 + rope(size_t __n, _CharT __c,
1.1385 + const allocator_type& __a = allocator_type()):
1.1386 + _M_tree_ptr(__a, (_RopeRep*)0) {
1.1387 + if (0 == __n)
1.1388 + return;
1.1389 +
1.1390 + rope<_CharT,_Alloc> __result;
1.1391 +# define __exponentiate_threshold size_t(32)
1.1392 + _RopeRep* __remainder;
1.1393 + rope<_CharT,_Alloc> __remainder_rope;
1.1394 +
1.1395 + // gcc-2.7.2 bugs
1.1396 + typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1397 +
1.1398 + size_t __exponent = __n / __exponentiate_threshold;
1.1399 + size_t __rest = __n % __exponentiate_threshold;
1.1400 + if (0 == __rest) {
1.1401 + __remainder = 0;
1.1402 + } else {
1.1403 + _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1.1404 + uninitialized_fill_n(__rest_buffer, __rest, __c);
1.1405 + _S_construct_null(__rest_buffer + __rest);
1.1406 + _STLP_TRY {
1.1407 + __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1.1408 + }
1.1409 + _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1.1410 + }
1.1411 + __remainder_rope._M_tree_ptr._M_data = __remainder;
1.1412 + if (__exponent != 0) {
1.1413 + _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1.1414 + _RopeLeaf* __base_leaf;
1.1415 + rope<_CharT,_Alloc> __base_rope;
1.1416 + uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1.1417 + _S_construct_null(__base_buffer + __exponentiate_threshold);
1.1418 + _STLP_TRY {
1.1419 + __base_leaf = _S_new_RopeLeaf(__base_buffer,
1.1420 + __exponentiate_threshold, __a);
1.1421 + }
1.1422 + _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1.1423 + __exponentiate_threshold, __a))
1.1424 + __base_rope._M_tree_ptr._M_data = __base_leaf;
1.1425 + if (1 == __exponent) {
1.1426 + __result = __base_rope;
1.1427 + // One each for base_rope and __result
1.1428 + //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1.1429 + } else {
1.1430 + __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
1.1431 + }
1.1432 + if (0 != __remainder) {
1.1433 + __result += __remainder_rope;
1.1434 + }
1.1435 + } else {
1.1436 + __result = __remainder_rope;
1.1437 + }
1.1438 + _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1.1439 + _M_tree_ptr._M_data->_M_ref_nonnil();
1.1440 +# undef __exponentiate_threshold
1.1441 + }
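+
+  // Illustrative sketch, not part of the original header: how the fill
+  // constructor above is typically used.  rope(n, c) builds one
+  // 32-character leaf and squares it with __power, so construction needs
+  // only roughly O(log n) concatenations rather than n character writes.
+  //
+  //   crope padding(1000, ' ');                        // 1000 spaces, built by doubling
+  //   crope indented = padding.substr(0, 4) + "text";  // shares the padding tree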
1.1442 +
1.1443 + rope(const allocator_type& __a = allocator_type())
1.1444 + : _M_tree_ptr(__a, (_RopeRep*)0) {}
1.1445 +
1.1446 +  // Construct a rope from a char_producer that can compute its characters on demand.
1.1447 + rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1.1448 + const allocator_type& __a = allocator_type())
1.1449 + : _M_tree_ptr(__a, (_RopeRep*)0) {
1.1450 + _M_tree_ptr._M_data = (0 == __len) ?
1.1451 + 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1.1452 + }
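+
+  // Illustrative sketch, not part of the original header: a lazily evaluated
+  // rope built with the char_producer constructor above.  The producer class
+  // below is hypothetical; it assumes char_producer<char>'s usual
+  // operator()(start_pos, len, buffer) interface, which fills buffer with
+  // len characters starting at start_pos.  No character data is allocated
+  // until the rope is actually examined.
+  //
+  //   struct dash_producer : public char_producer<char> {
+  //     virtual void operator()(size_t, size_t len, char* buf)
+  //     { for (size_t i = 0; i != len; ++i) buf[i] = '-'; }
+  //   };
+  //   crope lazy_dashes(new dash_producer, 1024 * 1024, true /* delete producer */);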
1.1453 +
1.1454 + rope(const _Self& __x)
1.1455 + : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
1.1456 + _S_ref(_M_tree_ptr._M_data);
1.1457 + }
1.1458 +
1.1459 + rope(__move_source<_Self> __src)
1.1460 + : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
1.1461 + __src.get()._M_tree_ptr._M_data = 0;
1.1462 + }
1.1463 +
1.1464 + ~rope() {
1.1465 + _S_unref(_M_tree_ptr._M_data);
1.1466 + }
1.1467 +
1.1468 + _Self& operator=(const _Self& __x) {
1.1469 + _STLP_ASSERT(get_allocator() == __x.get_allocator())
1.1470 + _S_ref(__x._M_tree_ptr._M_data);
1.1471 + _M_reset(__x._M_tree_ptr._M_data);
1.1472 + return *this;
1.1473 + }
1.1474 +
1.1475 + void clear() {
1.1476 + _S_unref(_M_tree_ptr._M_data);
1.1477 + _M_tree_ptr._M_data = 0;
1.1478 + }
1.1479 + void push_back(_CharT __x) {
1.1480 + _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
1.1481 + }
1.1482 +
1.1483 + void pop_back() {
1.1484 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1485 + _M_tree_ptr._M_data =
1.1486 + _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1487 + _S_unref(__old);
1.1488 + }
1.1489 +
1.1490 + _CharT back() const {
1.1491 + return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1492 + }
1.1493 +
1.1494 + void push_front(_CharT __x) {
1.1495 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1496 + _RopeRep* __left =
1.1497 + _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
1.1498 + _STLP_TRY {
1.1499 + _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1.1500 + _S_unref(__old);
1.1501 + _S_unref(__left);
1.1502 + }
1.1503 + _STLP_UNWIND(_S_unref(__left))
1.1504 + }
1.1505 +
1.1506 + void pop_front() {
1.1507 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1508 + _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1.1509 + _S_unref(__old);
1.1510 + }
1.1511 +
1.1512 + _CharT front() const {
1.1513 + return _S_fetch(_M_tree_ptr._M_data, 0);
1.1514 + }
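+
+  // Illustrative sketch, not part of the original header: the deque-like
+  // operations above.  Each call installs a new tree that shares nodes with
+  // the old one, so previously taken copies of the rope are unaffected.
+  //
+  //   crope r("bcd");
+  //   r.push_front('a');   // "abcd"
+  //   r.push_back('e');    // "abcde"
+  //   r.pop_front();       // "bcde", r.front() == 'b'
+  //   r.pop_back();        // "bcd",  r.back()  == 'd'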
1.1515 +
1.1516 + void balance() {
1.1517 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1518 + _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
1.1519 + _S_unref(__old);
1.1520 + }
1.1521 +
1.1522 + void copy(_CharT* __buffer) const {
1.1523 + _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
1.1524 + _S_flatten(_M_tree_ptr._M_data, __buffer);
1.1525 + }
1.1526 +
1.1527 + /*
1.1528 + * This is the copy function from the standard, but
1.1529 + * with the arguments reordered to make it consistent with the
1.1530 + * rest of the interface.
1.1531 +   * Note that this is guaranteed not to compile if the draft standard
1.1532 + * order is assumed.
1.1533 + */
1.1534 + size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
1.1535 + size_t _p_size = size();
1.1536 + size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1.1537 +
1.1538 + _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
1.1539 + _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1.1540 + return __len;
1.1541 + }
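+
+  // Illustrative sketch, not part of the original header: copying a slice of
+  // a rope into a plain buffer with the (pos, n, buffer) overload above.  For
+  // crope the destroy step is trivial; the call flattens characters
+  // [pos, pos + n) into the buffer and returns how many were written.
+  //
+  //   crope r("hello world");
+  //   char buf[5];
+  //   size_t got = r.copy(6, 5, buf);   // buf now holds "world", got == 5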
1.1542 +
1.1543 +# ifdef _STLP_DEBUG
1.1544 + // Print to stdout, exposing structure. May be useful for
1.1545 + // performance debugging.
1.1546 + void dump() {
1.1547 + _S_dump(_M_tree_ptr._M_data);
1.1548 + }
1.1549 +# endif
1.1550 +
1.1551 +  // Convert to a 0-terminated string in newly allocated memory.
1.1552 + // Embedded 0s in the input do not terminate the copy.
1.1553 + const _CharT* c_str() const;
1.1554 +
1.1555 +  // As above, but also use the flattened representation as
1.1556 +  // the new rope representation.
1.1557 + const _CharT* replace_with_c_str();
1.1558 +
1.1559 +  // Reclaim the memory for the flattened string generated by c_str().
1.1560 + // Intentionally undocumented, since it's hard to say when this
1.1561 + // is safe for multiple threads.
1.1562 + void delete_c_str () {
1.1563 + if (0 == _M_tree_ptr._M_data) return;
1.1564 + if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1.1565 + ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1.1566 + _M_tree_ptr._M_data->_M_c_string) {
1.1567 + // Representation shared
1.1568 + return;
1.1569 + }
1.1570 + _M_tree_ptr._M_data->_M_free_c_string();
1.1571 + _M_tree_ptr._M_data->_M_c_string = 0;
1.1572 + }
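+
+  // Illustrative sketch, not part of the original header: the flattening
+  // helpers above.
+  //
+  //   crope r("abc");
+  //   r.append("def");
+  //   const char* p = r.c_str();  // lazily builds and caches "abcdef\0"
+  //   r.delete_c_str();           // frees that cache; p is now dangling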
1.1573 +
1.1574 + _CharT operator[] (size_type __pos) const {
1.1575 + return _S_fetch(_M_tree_ptr._M_data, __pos);
1.1576 + }
1.1577 +
1.1578 + _CharT at(size_type __pos) const {
1.1579 + if (__pos >= size()) _M_throw_out_of_range();
1.1580 + return (*this)[__pos];
1.1581 + }
1.1582 +
1.1583 + const_iterator begin() const {
1.1584 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1585 + }
1.1586 +
1.1587 + // An easy way to get a const iterator from a non-const container.
1.1588 + const_iterator const_begin() const {
1.1589 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1590 + }
1.1591 +
1.1592 + const_iterator end() const {
1.1593 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1594 + }
1.1595 +
1.1596 + const_iterator const_end() const {
1.1597 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1598 + }
1.1599 +
1.1600 + size_type size() const {
1.1601 + return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1.1602 + }
1.1603 +
1.1604 + size_type length() const {
1.1605 + return size();
1.1606 + }
1.1607 +
1.1608 + size_type max_size() const {
1.1609 + return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1.1610 + // Guarantees that the result can be sufficiently
1.1611 + // balanced. Longer ropes will probably still work,
1.1612 + // but it's harder to make guarantees.
1.1613 + }
1.1614 +
1.1615 + const_reverse_iterator rbegin() const {
1.1616 + return const_reverse_iterator(end());
1.1617 + }
1.1618 +
1.1619 + const_reverse_iterator const_rbegin() const {
1.1620 + return const_reverse_iterator(end());
1.1621 + }
1.1622 +
1.1623 + const_reverse_iterator rend() const {
1.1624 + return const_reverse_iterator(begin());
1.1625 + }
1.1626 +
1.1627 + const_reverse_iterator const_rend() const {
1.1628 + return const_reverse_iterator(begin());
1.1629 + }
1.1630 + // The symmetric cases are intentionally omitted, since they're presumed
1.1631 + // to be less common, and we don't handle them as well.
1.1632 +
1.1633 + // The following should really be templatized.
1.1634 + // The first argument should be an input iterator or
1.1635 + // forward iterator with value_type _CharT.
1.1636 + _Self& append(const _CharT* __iter, size_t __n) {
1.1637 + _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
1.1638 + return *this;
1.1639 + }
1.1640 +
1.1641 + _Self& append(const _CharT* __c_string) {
1.1642 + size_t __len = _S_char_ptr_len(__c_string);
1.1643 + append(__c_string, __len);
1.1644 + return *this;
1.1645 + }
1.1646 +
1.1647 + _Self& append(const _CharT* __s, const _CharT* __e) {
1.1648 + _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
1.1649 + return *this;
1.1650 + }
1.1651 +
1.1652 + _Self& append(const_iterator __s, const_iterator __e) {
1.1653 + _STLP_ASSERT(__s._M_root == __e._M_root)
1.1654 + _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1.1655 + _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
1.1656 + _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
1.1657 + return *this;
1.1658 + }
1.1659 +
1.1660 + _Self& append(_CharT __c) {
1.1661 + _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
1.1662 + return *this;
1.1663 + }
1.1664 +
1.1665 + _Self& append() { return append(_CharT()); } // XXX why?
1.1666 +
1.1667 + _Self& append(const _Self& __y) {
1.1668 + _STLP_ASSERT(__y.get_allocator() == get_allocator())
1.1669 + _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
1.1670 + return *this;
1.1671 + }
1.1672 +
1.1673 + _Self& append(size_t __n, _CharT __c) {
1.1674 + rope<_CharT,_Alloc> __last(__n, __c);
1.1675 + return append(__last);
1.1676 + }
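+
+  // Illustrative sketch, not part of the original header: the append
+  // overloads above.  All of them reduce to tree concatenation, so the
+  // existing representation is shared rather than copied where possible.
+  //
+  //   crope r("foo");
+  //   r.append("bar");               // C string       -> "foobar"
+  //   r.append(3, '!');              // n copies of c  -> "foobar!!!"
+  //   crope s("baz");
+  //   r.append(s.begin(), s.end());  // iterator range -> "foobar!!!baz"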
1.1677 +
1.1678 + void swap(_Self& __b) {
1.1679 + _M_tree_ptr.swap(__b._M_tree_ptr);
1.1680 + }
1.1681 +
1.1682 +protected:
1.1683 +  // The result is returned with its reference count already incremented.
1.1684 + static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1.1685 + size_t __pos2, _RopeRep* __r) {
1.1686 + if (0 == __old) { _S_ref(__r); return __r; }
1.1687 + _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
1.1688 + _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
1.1689 + _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1.1690 + _RopeRep* __result;
1.1691 +
1.1692 + if (0 == __r) {
1.1693 + __result = _S_concat_rep(__left, __right);
1.1694 + } else {
1.1695 + _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1.1696 + _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1.1697 + __result = _S_concat_rep(__left_result, __right);
1.1698 + }
1.1699 + return __result;
1.1700 + _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
1.1701 + }
1.1702 +
1.1703 +public:
1.1704 + void insert(size_t __p, const _Self& __r) {
1.1705 + if (__p > size()) _M_throw_out_of_range();
1.1706 + _STLP_ASSERT(get_allocator() == __r.get_allocator())
1.1707 + _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
1.1708 + }
1.1709 +
1.1710 + void insert(size_t __p, size_t __n, _CharT __c) {
1.1711 + rope<_CharT,_Alloc> __r(__n,__c);
1.1712 + insert(__p, __r);
1.1713 + }
1.1714 +
1.1715 + void insert(size_t __p, const _CharT* __i, size_t __n) {
1.1716 + if (__p > size()) _M_throw_out_of_range();
1.1717 + _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1.1718 + _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1.1719 + _Self_destruct_ptr __left_result(
1.1720 + _S_concat_char_iter(__left, __i, __n));
1.1721 +    // _S_destr_concat_char_iter should be safe here.
1.1722 + // But as it stands it's probably not a win, since __left
1.1723 + // is likely to have additional references.
1.1724 + _M_reset(_S_concat_rep(__left_result, __right));
1.1725 + }
1.1726 +
1.1727 + void insert(size_t __p, const _CharT* __c_string) {
1.1728 + insert(__p, __c_string, _S_char_ptr_len(__c_string));
1.1729 + }
1.1730 +
1.1731 + void insert(size_t __p, _CharT __c) {
1.1732 + insert(__p, &__c, 1);
1.1733 + }
1.1734 +
1.1735 + void insert(size_t __p) {
1.1736 + _CharT __c = _CharT();
1.1737 + insert(__p, &__c, 1);
1.1738 + }
1.1739 +
1.1740 + void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1741 + _Self __r(__i, __j);
1.1742 + insert(__p, __r);
1.1743 + }
1.1744 +
1.1745 + void insert(size_t __p, const const_iterator& __i,
1.1746 + const const_iterator& __j) {
1.1747 + _Self __r(__i, __j);
1.1748 + insert(__p, __r);
1.1749 + }
1.1750 +
1.1751 + void insert(size_t __p, const iterator& __i,
1.1752 + const iterator& __j) {
1.1753 + _Self __r(__i, __j);
1.1754 + insert(__p, __r);
1.1755 + }
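+
+  // Illustrative sketch, not part of the original header: the insert
+  // overloads above.  Positions count from 0; a position past size() is
+  // reported through _M_throw_out_of_range().
+  //
+  //   crope r("helloworld");
+  //   r.insert(5, ", ");        // "hello, world"
+  //   r.insert(0, 2, '*');      // "**hello, world"
+  //   r.insert(r.size(), '!');  // inserting at size() appends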
1.1756 +
1.1757 + // (position, length) versions of replace operations:
1.1758 + void replace(size_t __p, size_t __n, const _Self& __r) {
1.1759 + if (__p > size()) _M_throw_out_of_range();
1.1760 + _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
1.1761 + }
1.1762 +
1.1763 + void replace(size_t __p, size_t __n,
1.1764 + const _CharT* __i, size_t __i_len) {
1.1765 + _Self __r(__i, __i_len);
1.1766 + replace(__p, __n, __r);
1.1767 + }
1.1768 +
1.1769 + void replace(size_t __p, size_t __n, _CharT __c) {
1.1770 + _Self __r(__c);
1.1771 + replace(__p, __n, __r);
1.1772 + }
1.1773 +
1.1774 + void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1.1775 + _Self __r(__c_string);
1.1776 + replace(__p, __n, __r);
1.1777 + }
1.1778 +
1.1779 + void replace(size_t __p, size_t __n,
1.1780 + const _CharT* __i, const _CharT* __j) {
1.1781 + _Self __r(__i, __j);
1.1782 + replace(__p, __n, __r);
1.1783 + }
1.1784 +
1.1785 + void replace(size_t __p, size_t __n,
1.1786 + const const_iterator& __i, const const_iterator& __j) {
1.1787 + _Self __r(__i, __j);
1.1788 + replace(__p, __n, __r);
1.1789 + }
1.1790 +
1.1791 + void replace(size_t __p, size_t __n,
1.1792 + const iterator& __i, const iterator& __j) {
1.1793 + _Self __r(__i, __j);
1.1794 + replace(__p, __n, __r);
1.1795 + }
1.1796 +
1.1797 + // Single character variants:
1.1798 + void replace(size_t __p, _CharT __c) {
1.1799 + if (__p > size()) _M_throw_out_of_range();
1.1800 + iterator __i(this, __p);
1.1801 + *__i = __c;
1.1802 + }
1.1803 +
1.1804 + void replace(size_t __p, const _Self& __r) {
1.1805 + replace(__p, 1, __r);
1.1806 + }
1.1807 +
1.1808 + void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1.1809 + replace(__p, 1, __i, __i_len);
1.1810 + }
1.1811 +
1.1812 + void replace(size_t __p, const _CharT* __c_string) {
1.1813 + replace(__p, 1, __c_string);
1.1814 + }
1.1815 +
1.1816 + void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1817 + replace(__p, 1, __i, __j);
1.1818 + }
1.1819 +
1.1820 + void replace(size_t __p, const const_iterator& __i,
1.1821 + const const_iterator& __j) {
1.1822 + replace(__p, 1, __i, __j);
1.1823 + }
1.1824 +
1.1825 + void replace(size_t __p, const iterator& __i,
1.1826 + const iterator& __j) {
1.1827 + replace(__p, 1, __i, __j);
1.1828 + }
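+
+  // Illustrative sketch, not part of the original header: the replace
+  // overloads above.
+  //
+  //   crope r("hello world");
+  //   r.replace(6, 5, "rope");  // (pos, len, C string) -> "hello rope"
+  //   r.replace(0, 'H');        // single character     -> "Hello rope"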
1.1829 +
1.1830 + // Erase, (position, size) variant.
1.1831 + void erase(size_t __p, size_t __n) {
1.1832 + if (__p > size()) _M_throw_out_of_range();
1.1833 + _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
1.1834 + }
1.1835 +
1.1836 + // Erase, single character
1.1837 + void erase(size_t __p) {
1.1838 +    erase(__p, 1);
1.1839 + }
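+
+  // Illustrative sketch, not part of the original header:
+  //
+  //   crope r("abcdef");
+  //   r.erase(1, 2);   // remove "bc"          -> "adef"
+  //   r.erase(0);      // remove one character -> "def"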
1.1840 +
1.1841 + // Insert, iterator variants.
1.1842 + iterator insert(const iterator& __p, const _Self& __r)
1.1843 + { insert(__p.index(), __r); return __p; }
1.1844 + iterator insert(const iterator& __p, size_t __n, _CharT __c)
1.1845 + { insert(__p.index(), __n, __c); return __p; }
1.1846 + iterator insert(const iterator& __p, _CharT __c)
1.1847 + { insert(__p.index(), __c); return __p; }
1.1848 +  iterator insert(const iterator& __p)
1.1849 + { insert(__p.index()); return __p; }
1.1850 + iterator insert(const iterator& __p, const _CharT* c_string)
1.1851 + { insert(__p.index(), c_string); return __p; }
1.1852 + iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1.1853 + { insert(__p.index(), __i, __n); return __p; }
1.1854 + iterator insert(const iterator& __p, const _CharT* __i,
1.1855 + const _CharT* __j)
1.1856 + { insert(__p.index(), __i, __j); return __p; }
1.1857 + iterator insert(const iterator& __p,
1.1858 + const const_iterator& __i, const const_iterator& __j)
1.1859 + { insert(__p.index(), __i, __j); return __p; }
1.1860 + iterator insert(const iterator& __p,
1.1861 + const iterator& __i, const iterator& __j)
1.1862 + { insert(__p.index(), __i, __j); return __p; }
1.1863 +
1.1864 + // Replace, range variants.
1.1865 + void replace(const iterator& __p, const iterator& __q,
1.1866 + const _Self& __r)
1.1867 + { replace(__p.index(), __q.index() - __p.index(), __r); }
1.1868 + void replace(const iterator& __p, const iterator& __q, _CharT __c)
1.1869 + { replace(__p.index(), __q.index() - __p.index(), __c); }
1.1870 + void replace(const iterator& __p, const iterator& __q,
1.1871 + const _CharT* __c_string)
1.1872 + { replace(__p.index(), __q.index() - __p.index(), __c_string); }
1.1873 + void replace(const iterator& __p, const iterator& __q,
1.1874 + const _CharT* __i, size_t __n)
1.1875 + { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
1.1876 + void replace(const iterator& __p, const iterator& __q,
1.1877 + const _CharT* __i, const _CharT* __j)
1.1878 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.1879 + void replace(const iterator& __p, const iterator& __q,
1.1880 + const const_iterator& __i, const const_iterator& __j)
1.1881 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.1882 + void replace(const iterator& __p, const iterator& __q,
1.1883 + const iterator& __i, const iterator& __j)
1.1884 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.1885 +
1.1886 + // Replace, iterator variants.
1.1887 + void replace(const iterator& __p, const _Self& __r)
1.1888 + { replace(__p.index(), __r); }
1.1889 + void replace(const iterator& __p, _CharT __c)
1.1890 + { replace(__p.index(), __c); }
1.1891 + void replace(const iterator& __p, const _CharT* __c_string)
1.1892 + { replace(__p.index(), __c_string); }
1.1893 + void replace(const iterator& __p, const _CharT* __i, size_t __n)
1.1894 + { replace(__p.index(), __i, __n); }
1.1895 + void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
1.1896 + { replace(__p.index(), __i, __j); }
1.1897 + void replace(const iterator& __p, const_iterator __i,
1.1898 + const_iterator __j)
1.1899 + { replace(__p.index(), __i, __j); }
1.1900 + void replace(const iterator& __p, iterator __i, iterator __j)
1.1901 + { replace(__p.index(), __i, __j); }
1.1902 +
1.1903 + // Iterator and range variants of erase
1.1904 + iterator erase(const iterator& __p, const iterator& __q) {
1.1905 + size_t __p_index = __p.index();
1.1906 + erase(__p_index, __q.index() - __p_index);
1.1907 + return iterator(this, __p_index);
1.1908 + }
1.1909 + iterator erase(const iterator& __p) {
1.1910 + size_t __p_index = __p.index();
1.1911 + erase(__p_index, 1);
1.1912 + return iterator(this, __p_index);
1.1913 + }
1.1914 +
1.1915 + _Self substr(size_t __start, size_t __len = 1) const {
1.1916 + if (__start > size()) _M_throw_out_of_range();
1.1917 + return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
1.1918 + }
1.1919 +
1.1920 + _Self substr(iterator __start, iterator __end) const {
1.1921 + return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.1922 + }
1.1923 +
1.1924 + _Self substr(iterator __start) const {
1.1925 + size_t __pos = __start.index();
1.1926 + return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.1927 + }
1.1928 +
1.1929 + _Self substr(const_iterator __start, const_iterator __end) const {
1.1930 + // This might eventually take advantage of the cache in the
1.1931 + // iterator.
1.1932 + return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.1933 + }
1.1934 +
1.1935 + rope<_CharT,_Alloc> substr(const_iterator __start) {
1.1936 + size_t __pos = __start.index();
1.1937 + return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.1938 + }
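+
+  // Illustrative sketch, not part of the original header: the substr
+  // overloads above.  The result is built with _S_substring, so for long
+  // ropes it shares the original tree instead of copying characters.
+  //
+  //   crope r("hello world");
+  //   crope w = r.substr(6, 5);                       // "world"
+  //   crope h = r.substr(r.begin(), r.begin() + 5);   // "hello"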
1.1939 +
1.1940 +#include <stl/_string_npos.h>
1.1941 +
1.1942 + size_type find(const _Self& __s, size_type __pos = 0) const {
1.1943 + if (__pos >= size())
1.1944 +# ifndef _STLP_OLD_ROPE_SEMANTICS
1.1945 + return npos;
1.1946 +# else
1.1947 + return size();
1.1948 +# endif
1.1949 +
1.1950 + size_type __result_pos;
1.1951 + const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
1.1952 + __result_pos = __result.index();
1.1953 +# ifndef _STLP_OLD_ROPE_SEMANTICS
1.1954 + if (__result_pos == size()) __result_pos = npos;
1.1955 +# endif
1.1956 + return __result_pos;
1.1957 + }
1.1958 + size_type find(_CharT __c, size_type __pos = 0) const;
1.1959 + size_type find(const _CharT* __s, size_type __pos = 0) const {
1.1960 + size_type __result_pos;
1.1961 + const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
1.1962 + __s, __s + _S_char_ptr_len(__s));
1.1963 + __result_pos = __result.index();
1.1964 +# ifndef _STLP_OLD_ROPE_SEMANTICS
1.1965 + if (__result_pos == size()) __result_pos = npos;
1.1966 +# endif
1.1967 + return __result_pos;
1.1968 + }
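+
+  // Illustrative sketch, not part of the original header: find, which
+  // behaves like basic_string::find and yields npos on failure (or size()
+  // when _STLP_OLD_ROPE_SEMANTICS is defined).
+  //
+  //   crope r("one two two three");
+  //   size_t a = r.find("two");     // 4
+  //   size_t b = r.find("two", 5);  // 8
+  //   size_t c = r.find("zzz");     // crope::npos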
1.1969 +
1.1970 + iterator mutable_begin() {
1.1971 + return(iterator(this, 0));
1.1972 + }
1.1973 +
1.1974 + iterator mutable_end() {
1.1975 + return(iterator(this, size()));
1.1976 + }
1.1977 +
1.1978 + reverse_iterator mutable_rbegin() {
1.1979 + return reverse_iterator(mutable_end());
1.1980 + }
1.1981 +
1.1982 + reverse_iterator mutable_rend() {
1.1983 + return reverse_iterator(mutable_begin());
1.1984 + }
1.1985 +
1.1986 + reference mutable_reference_at(size_type __pos) {
1.1987 + return reference(this, __pos);
1.1988 + }
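+
+  // Illustrative sketch, not part of the original header: writes go through
+  // the proxy reference returned above (a _Rope_char_ref_proxy), because
+  // updating one character must rebuild part of the shared tree rather than
+  // modify it in place.
+  //
+  //   crope r("cat");
+  //   r.mutable_reference_at(0) = 'b';   // r now reads "bat"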
1.1989 +
1.1990 +# ifdef __STD_STUFF
1.1991 + reference operator[] (size_type __pos) {
1.1992 + return reference(this, __pos);
1.1993 + }
1.1994 +
1.1995 + reference at(size_type __pos) {
1.1996 + if (__pos >= size()) _M_throw_out_of_range();
1.1997 + return (*this)[__pos];
1.1998 + }
1.1999 +
1.2000 + void resize(size_type, _CharT) {}
1.2001 + void resize(size_type) {}
1.2002 + void reserve(size_type = 0) {}
1.2003 + size_type capacity() const {
1.2004 + return max_size();
1.2005 + }
1.2006 +
1.2007 + // Stuff below this line is dangerous because it's error prone.
1.2008 + // I would really like to get rid of it.
1.2009 + // copy function with funny arg ordering.
1.2010 + size_type copy(_CharT* __buffer, size_type __n,
1.2011 + size_type __pos = 0) const {
1.2012 + return copy(__pos, __n, __buffer);
1.2013 + }
1.2014 +
1.2015 + iterator end() { return mutable_end(); }
1.2016 +
1.2017 + iterator begin() { return mutable_begin(); }
1.2018 +
1.2019 + reverse_iterator rend() { return mutable_rend(); }
1.2020 +
1.2021 + reverse_iterator rbegin() { return mutable_rbegin(); }
1.2022 +
1.2023 +# else
1.2024 +
1.2025 + const_iterator end() { return const_end(); }
1.2026 +
1.2027 + const_iterator begin() { return const_begin(); }
1.2028 +
1.2029 + const_reverse_iterator rend() { return const_rend(); }
1.2030 +
1.2031 + const_reverse_iterator rbegin() { return const_rbegin(); }
1.2032 +
1.2033 +# endif
1.2034 +}; //class rope
1.2035 +
1.2036 +#if !defined (_STLP_STATIC_CONST_INIT_BUG)
1.2037 +# if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
1.2038 +template <class _CharT, class _Alloc>
1.2039 +const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
1.2040 +# endif
1.2041 +#endif
1.2042 +
1.2043 +template <class _CharT, class _Alloc>
1.2044 +inline _CharT
1.2045 +_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
1.2046 +{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
1.2047 +
1.2048 +template <class _CharT, class _Alloc>
1.2049 +inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2050 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2051 + return (__x._M_current_pos == __y._M_current_pos &&
1.2052 + __x._M_root == __y._M_root);
1.2053 +}
1.2054 +
1.2055 +template <class _CharT, class _Alloc>
1.2056 +inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2057 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2058 +{ return (__x._M_current_pos < __y._M_current_pos); }
1.2059 +
1.2060 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2061 +
1.2062 +template <class _CharT, class _Alloc>
1.2063 +inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2064 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2065 +{ return !(__x == __y); }
1.2066 +
1.2067 +template <class _CharT, class _Alloc>
1.2068 +inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2069 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2070 +{ return __y < __x; }
1.2071 +
1.2072 +template <class _CharT, class _Alloc>
1.2073 +inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2074 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2075 +{ return !(__y < __x); }
1.2076 +
1.2077 +template <class _CharT, class _Alloc>
1.2078 +inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2079 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2080 +{ return !(__x < __y); }
1.2081 +
1.2082 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2083 +
1.2084 +template <class _CharT, class _Alloc>
1.2085 +inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2086 + const _Rope_const_iterator<_CharT,_Alloc>& __y)
1.2087 +{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
1.2088 +
1.2089 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2090 +template <class _CharT, class _Alloc>
1.2091 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2092 +operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
1.2093 +{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
1.2094 +# endif
1.2095 +
1.2096 +template <class _CharT, class _Alloc>
1.2097 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2098 +operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
1.2099 +{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
1.2100 +
1.2101 +template <class _CharT, class _Alloc>
1.2102 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2103 +operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
1.2104 +{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
1.2105 +
1.2106 +template <class _CharT, class _Alloc>
1.2107 +inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2108 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2109 + return (__x._M_current_pos == __y._M_current_pos &&
1.2110 + __x._M_root_rope == __y._M_root_rope);
1.2111 +}
1.2112 +
1.2113 +template <class _CharT, class _Alloc>
1.2114 +inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2115 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2116 +{ return (__x._M_current_pos < __y._M_current_pos); }
1.2117 +
1.2118 +#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
1.2119 +template <class _CharT, class _Alloc>
1.2120 +inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2121 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2122 +{ return !(__x == __y); }
1.2123 +
1.2124 +template <class _CharT, class _Alloc>
1.2125 +inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2126 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2127 +{ return __y < __x; }
1.2128 +
1.2129 +template <class _CharT, class _Alloc>
1.2130 +inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2131 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2132 +{ return !(__y < __x); }
1.2133 +
1.2134 +template <class _CharT, class _Alloc>
1.2135 +inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2136 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2137 +{ return !(__x < __y); }
1.2138 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2139 +
1.2140 +template <class _CharT, class _Alloc>
1.2141 +inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2142 + const _Rope_iterator<_CharT,_Alloc>& __y)
1.2143 +{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
1.2144 +
1.2145 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2146 +template <class _CharT, class _Alloc>
1.2147 +inline _Rope_iterator<_CharT,_Alloc>
1.2148 +operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2149 + ptrdiff_t __n) {
1.2150 + return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
1.2151 +}
1.2152 +# endif
1.2153 +
1.2154 +template <class _CharT, class _Alloc>
1.2155 +inline _Rope_iterator<_CharT,_Alloc>
1.2156 +operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2157 + ptrdiff_t __n) {
1.2158 + return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
1.2159 +}
1.2160 +
1.2161 +template <class _CharT, class _Alloc>
1.2162 +inline _Rope_iterator<_CharT,_Alloc>
1.2163 +operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
1.2164 + return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
1.2165 +}
1.2166 +
1.2167 +template <class _CharT, class _Alloc>
1.2168 +inline rope<_CharT,_Alloc>
1.2169 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2170 + const rope<_CharT,_Alloc>& __right) {
1.2171 + _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
1.2172 + return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
1.2173 + // Inlining this should make it possible to keep __left and __right in registers.
1.2174 +}
1.2175 +
1.2176 +template <class _CharT, class _Alloc>
1.2177 +inline rope<_CharT,_Alloc>&
1.2178 +operator+= (rope<_CharT,_Alloc>& __left,
1.2179 + const rope<_CharT,_Alloc>& __right) {
1.2180 + __left.append(__right);
1.2181 + return __left;
1.2182 +}
1.2183 +
1.2184 +template <class _CharT, class _Alloc>
1.2185 +inline rope<_CharT,_Alloc>
1.2186 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2187 + const _CharT* __right) {
1.2188 + size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
1.2189 + return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
1.2190 +}
1.2191 +
1.2192 +template <class _CharT, class _Alloc>
1.2193 +inline rope<_CharT,_Alloc>&
1.2194 +operator+= (rope<_CharT,_Alloc>& __left,
1.2195 + const _CharT* __right) {
1.2196 + __left.append(__right);
1.2197 + return __left;
1.2198 +}
1.2199 +
1.2200 +template <class _CharT, class _Alloc>
1.2201 +inline rope<_CharT,_Alloc>
1.2202 +operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
1.2203 + return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
1.2204 +}
1.2205 +
1.2206 +template <class _CharT, class _Alloc>
1.2207 +inline rope<_CharT,_Alloc>&
1.2208 +operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
1.2209 + __left.append(__right);
1.2210 + return __left;
1.2211 +}
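+
+// Illustrative sketch, not part of the original header: the concatenation
+// operators above.  The result shares both operands' trees, so concatenating
+// two large ropes is cheap.
+//
+//   crope a("foo"), b("bar");
+//   crope c = a + b + '!';   // "foobar!"
+//   c += "?";                // "foobar!?"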
1.2212 +
1.2213 +template <class _CharT, class _Alloc>
1.2214 +inline bool
1.2215 +operator< (const rope<_CharT,_Alloc>& __left,
1.2216 + const rope<_CharT,_Alloc>& __right) {
1.2217 + return __left.compare(__right) < 0;
1.2218 +}
1.2219 +
1.2220 +template <class _CharT, class _Alloc>
1.2221 +inline bool
1.2222 +operator== (const rope<_CharT,_Alloc>& __left,
1.2223 + const rope<_CharT,_Alloc>& __right) {
1.2224 + return __left.compare(__right) == 0;
1.2225 +}
1.2226 +
1.2227 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2228 +
1.2229 +template <class _CharT, class _Alloc>
1.2230 +inline bool
1.2231 +operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2232 + return !(__x == __y);
1.2233 +}
1.2234 +
1.2235 +template <class _CharT, class _Alloc>
1.2236 +inline bool
1.2237 +operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2238 + return __y < __x;
1.2239 +}
1.2240 +
1.2241 +template <class _CharT, class _Alloc>
1.2242 +inline bool
1.2243 +operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2244 + return !(__y < __x);
1.2245 +}
1.2246 +
1.2247 +template <class _CharT, class _Alloc>
1.2248 +inline bool
1.2249 +operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2250 + return !(__x < __y);
1.2251 +}
1.2252 +
1.2253 +template <class _CharT, class _Alloc>
1.2254 +inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2255 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2256 + return !(__x == __y);
1.2257 +}
1.2258 +
1.2259 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2260 +
1.2261 +template <class _CharT, class _Alloc>
1.2262 +inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2263 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2264 + return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
1.2265 +}
1.2266 +
1.2267 +#if !defined (_STLP_USE_NO_IOSTREAMS)
1.2268 +template<class _CharT, class _Traits, class _Alloc>
1.2269 +basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
1.2270 + const rope<_CharT, _Alloc>& __r);
1.2271 +#endif
1.2272 +
1.2273 +typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
1.2274 +#if defined (_STLP_HAS_WCHAR_T)
1.2275 +typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
1.2276 +#endif
1.2277 +
1.2278 +inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
1.2279 +{ return __c.mutable_reference_at(__i); }
1.2280 +
1.2281 +#if defined (_STLP_HAS_WCHAR_T)
1.2282 +inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
1.2283 +{ return __c.mutable_reference_at(__i); }
1.2284 +#endif
1.2285 +
1.2286 +#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
1.2287 +template <class _CharT, class _Alloc>
1.2288 +inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
1.2289 +{ __x.swap(__y); }
1.2290 +#else
1.2291 +
1.2292 +inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
1.2293 +# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
1.2294 +inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
1.2295 +# endif
1.2296 +
1.2297 +#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
1.2298 +
1.2299 +
1.2300 +// Hash functions should probably be revisited later:
1.2301 +_STLP_TEMPLATE_NULL struct hash<crope> {
1.2302 + size_t operator()(const crope& __str) const {
1.2303 + size_t _p_size = __str.size();
1.2304 +
1.2305 + if (0 == _p_size) return 0;
1.2306 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2307 + }
1.2308 +};
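+
+// Illustrative sketch, not part of the original header: the specialization
+// above lets crope serve as a key in STLport's hash-based containers.  The
+// hash looks only at the first character, the last character and the length.
+//
+//   hash<crope> h;
+//   size_t v = h(crope("abc"));   // 13*'a' + 5*'c' + 3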
1.2309 +
1.2310 +#if defined (_STLP_HAS_WCHAR_T) // dwa 8/21/97
1.2311 +_STLP_TEMPLATE_NULL struct hash<wrope> {
1.2312 + size_t operator()(const wrope& __str) const {
1.2313 + size_t _p_size = __str.size();
1.2314 +
1.2315 + if (0 == _p_size) return 0;
1.2316 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2317 + }
1.2318 +};
1.2319 +#endif
1.2320 +
1.2321 +#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
1.2322 +// I couldn't get this to work with VC++
1.2323 +template<class _CharT,class _Alloc>
1.2324 +# if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
1.2325 +extern
1.2326 +# endif
1.2327 +void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
1.2328 + _Rope_iterator<_CharT, _Alloc> __middle,
1.2329 + _Rope_iterator<_CharT, _Alloc> __last);
1.2330 +
1.2331 +inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
1.2332 + _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
1.2333 + _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
1.2334 +{ _Rope_rotate(__first, __middle, __last); }
1.2335 +#endif
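+
+// Illustrative sketch, not part of the original header: the rotate overload
+// above forwards to _Rope_rotate, which is declared here and defined out of
+// line.
+//
+//   crope r("abcdef");
+//   rotate(r.mutable_begin(), r.mutable_begin() + 2, r.mutable_end());
+//   // r now reads "cdefab"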
1.2336 +
1.2337 +template <class _CharT, class _Alloc>
1.2338 +inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
1.2339 + if (_M_current_valid) {
1.2340 + return _M_current;
1.2341 + } else {
1.2342 + return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
1.2343 + }
1.2344 +}
1.2345 +
1.2346 +#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
1.2347 +template <class _CharT, class _Alloc>
1.2348 +struct __move_traits<rope<_CharT, _Alloc> > {
1.2349 + typedef __stlp_movable implemented;
1.2350 +  // Completeness depends on the allocator:
1.2351 + typedef typename __move_traits<_Alloc>::complete complete;
1.2352 +};
1.2353 +#endif
1.2354 +
1.2355 +_STLP_END_NAMESPACE
1.2356 +
1.2357 +#if !defined (_STLP_LINK_TIME_INSTANTIATION)
1.2358 +# include <stl/_rope.c>
1.2359 +#endif
1.2360 +
1.2361 +#endif /* _STLP_INTERNAL_ROPE_H */
1.2362 +
1.2363 +// Local Variables:
1.2364 +// mode:C++
1.2365 +// End: