1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1.2 +++ b/epoc32/include/stdapis/stlport/stl/_rope.h Wed Mar 31 12:33:34 2010 +0100
1.3 @@ -0,0 +1,2518 @@
1.4 +/*
1.5 + * © Portions copyright (c) 2006-2007 Nokia Corporation. All rights reserved.
1.6 + * Copyright (c) 1996,1997
1.7 + * Silicon Graphics Computer Systems, Inc.
1.8 + *
1.9 + * Copyright (c) 1997
1.10 + * Moscow Center for SPARC Technology
1.11 + *
1.12 + * Copyright (c) 1999
1.13 + * Boris Fomitchev
1.14 + *
1.15 + * This material is provided "as is", with absolutely no warranty expressed
1.16 + * or implied. Any use is at your own risk.
1.17 + *
1.18 + * Permission to use or copy this software for any purpose is hereby granted
1.19 + * without fee, provided the above notices are retained on all copies.
1.20 + * Permission to modify the code and to distribute modified code is granted,
1.21 + * provided the above notices are retained, and a notice that the code was
1.22 + * modified is included with the above copyright notice.
1.23 + *
1.24 + */
1.25 +
1.26 +/* NOTE: This is an internal header file, included by other STL headers.
1.27 + * You should not attempt to use it directly.
1.28 + */
1.29 +
1.30 +// rope<_CharT,_Alloc> is a sequence of _CharT.
1.31 +// Ropes appear to be mutable, but update operations
1.32 +// really copy enough of the data structure to leave the original
1.33 +// valid. Thus ropes can be logically copied by just copying
1.34 +// a pointer value.
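+//
+// A minimal usage sketch of that copy behaviour (illustrative only; it
+// assumes the SGI-style public rope interface -- push_back, operator== --
+// declared later in this header):
+//
+//   rope<char> r1("hello");
+//   rope<char> r2 = r1;   // O(1): copies a pointer and bumps a refcount
+//   r2.push_back('!');    // r1 is untouched; unchanged nodes stay shared
+//   // now r1 == "hello" and r2 == "hello!"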
1.35 +
1.36 +#ifndef _STLP_INTERNAL_ROPE_H
1.37 +# define _STLP_INTERNAL_ROPE_H
1.38 +
1.39 +# ifndef _STLP_INTERNAL_ALGOBASE_H
1.40 +# include <stl/_algobase.h>
1.41 +# endif
1.42 +
1.43 +# ifndef _STLP_IOSFWD
1.44 +# include <iosfwd>
1.45 +# endif
1.46 +
1.47 +# ifndef _STLP_INTERNAL_ALLOC_H
1.48 +# include <stl/_alloc.h>
1.49 +# endif
1.50 +
1.51 +# ifndef _STLP_INTERNAL_ITERATOR_H
1.52 +# include <stl/_iterator.h>
1.53 +# endif
1.54 +
1.55 +# ifndef _STLP_INTERNAL_ALGO_H
1.56 +# include <stl/_algo.h>
1.57 +# endif
1.58 +
1.59 +# ifndef _STLP_INTERNAL_FUNCTION_H
1.60 +# include <stl/_function.h>
1.61 +# endif
1.62 +
1.63 +# ifndef _STLP_INTERNAL_NUMERIC_H
1.64 +# include <stl/_numeric.h>
1.65 +# endif
1.66 +
1.67 +# ifndef _STLP_INTERNAL_HASH_FUN_H
1.68 +# include <stl/_hash_fun.h>
1.69 +# endif
1.70 +
1.71 +# ifdef __GC
1.72 +# define __GC_CONST const
1.73 +# else
1.74 +# include <stl/_threads.h>
1.75 +# define __GC_CONST // constant except for deallocation
1.76 +# endif
1.77 +# ifdef _STLP_SGI_THREADS
1.78 +# include <mutex.h>
1.79 +# endif
1.80 +
1.81 +#ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
1.82 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
1.83 +#elif defined(__MRC__)||defined(__SC__)
1.84 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
1.85 +#else
1.86 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
1.87 +#endif
1.88 +
1.89 +_STLP_BEGIN_NAMESPACE
1.90 +
1.91 +// First a lot of forward declarations. The standard seems to require
1.92 +// much stricter "declaration before use" than many of the implementations
1.93 +// that preceded it.
1.94 +template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
1.95 +template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
1.96 +template<class _CharT, class _Alloc> struct _Rope_RopeRep;
1.97 +template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
1.98 +template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
1.99 +template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
1.100 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.101 +template<class _CharT, class _Alloc> class _Rope_const_iterator;
1.102 +template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
1.103 +template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
1.104 +
1.105 +// Some helpers, so we can use power on ropes.
1.106 +// See below for why this isn't local to the implementation.
1.107 +
1.108 +// This uses a nonstandard refcount convention.
1.109 +// The result has refcount 0.
1.110 +template<class _CharT, class _Alloc>
1.111 +struct _Rope_Concat_fn
1.112 + : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
1.113 + rope<_CharT,_Alloc> > {
1.114 + rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
1.115 + const rope<_CharT,_Alloc>& __y) {
1.116 + return __x + __y;
1.117 + }
1.118 +};
1.119 +
1.120 +template <class _CharT, class _Alloc>
1.121 +inline
1.122 +rope<_CharT,_Alloc>
1.123 +__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
1.124 +{
1.125 + return rope<_CharT,_Alloc>();
1.126 +}
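+
+// For illustration only: these helpers exist so that power() (the SGI
+// extension declared in stl/_numeric.h, included above) can repeat a rope
+// with O(log n) concatenations -- the same trick the rope(size_t, _CharT)
+// constructor below uses.  A sketch:
+//
+//   rope<char> base("ab");
+//   rope<char> r = power(base, 8,
+//                        _Rope_Concat_fn<char, allocator<char> >());
+//   // r holds "ab" repeated 8 times, built from shared subtrees.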
1.127 +
1.128 +// The _S_eos function is used for those functions that
1.129 +// convert to/from C-like strings to detect the end of the string.
1.130 +
1.131 +// The end-of-C-string character.
1.132 +// This is what the draft standard says it should be.
1.133 +template <class _CharT>
1.134 +inline _CharT _S_eos(_CharT*) { return _CharT(); }
1.135 +
1.136 +// fbp : some compilers fail to zero-initialize builtins ;(
1.137 +inline const char _S_eos(const char*) { return 0; }
1.138 +# ifdef _STLP_HAS_WCHAR_T
1.139 +inline const wchar_t _S_eos(const wchar_t*) { return 0; }
1.140 +# endif
1.141 +
1.142 +// Test for basic character types.
1.143 +// For basic character types, leaves are given a trailing eos.
1.144 +template <class _CharT>
1.145 +inline bool _S_is_basic_char_type(_CharT*) { return false; }
1.146 +template <class _CharT>
1.147 +inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
1.148 +
1.149 +inline bool _S_is_basic_char_type(char*) { return true; }
1.150 +inline bool _S_is_one_byte_char_type(char*) { return true; }
1.151 +# ifdef _STLP_HAS_WCHAR_T
1.152 +inline bool _S_is_basic_char_type(wchar_t*) { return true; }
1.153 +# endif
1.154 +
1.155 +// Store an eos iff _CharT is a basic character type.
1.156 +// Do not reference _S_eos if it isn't.
1.157 +template <class _CharT>
1.158 +inline void _S_cond_store_eos(_CharT&) {}
1.159 +
1.160 +inline void _S_cond_store_eos(char& __c) { __c = 0; }
1.161 +# ifdef _STLP_HAS_WCHAR_T
1.162 +inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
1.163 +# endif
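+
+// For instance, _S_eos((char*)0) is '\0', while for a user-defined character
+// type it is a value-initialized _CharT().  _S_cond_store_eos writes that
+// terminator only for the basic character types handled above.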
1.164 +
1.165 +// char_producers are logically functions that generate a section of
1.166 +// a string. These can be converted to ropes. The resulting rope
1.167 +// invokes the char_producer on demand. This allows, for example,
1.168 +// files to be viewed as ropes without reading the entire file.
1.169 +template <class _CharT>
1.170 +class char_producer {
1.171 +public:
1.172 + virtual ~char_producer() {};
1.173 + virtual void operator()(size_t __start_pos, size_t __len,
1.174 + _CharT* __buffer) = 0;
1.175 + // Buffer should really be an arbitrary output iterator.
1.176 + // That way we could flatten directly into an ostream, etc.
1.177 + // This is thoroughly impossible, since iterator types don't
1.178 + // have runtime descriptions.
1.179 +};
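+
+// An illustrative char_producer (a sketch; "repeat_producer" is not part of
+// this header): it manufactures an arbitrarily long run of one character
+// without storing it.  A rope built from it via the
+// rope(char_producer<_CharT>*, size_t, bool, allocator_type) constructor
+// below materializes characters only when they are actually examined.
+//
+//   template <class _CharT>
+//   class repeat_producer : public char_producer<_CharT> {
+//     _CharT _M_c;
+//   public:
+//     repeat_producer(_CharT __c) : _M_c(__c) {}
+//     virtual void operator()(size_t, size_t __len, _CharT* __buffer) {
+//       for (size_t __i = 0; __i < __len; ++__i) __buffer[__i] = _M_c;
+//     }
+//   };
+//
+//   rope<char> lazy(new repeat_producer<char>('x'), 1000000, true);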
1.180 +
1.181 +// Sequence buffers:
1.182 +//
1.183 +// Sequence must provide an append operation that appends an
1.184 +// array to the sequence. Sequence buffers are useful only if
1.185 +// appending an entire array is cheaper than appending element by element.
1.186 +// This is true for many string representations.
1.187 +// This should perhaps inherit from ostream<sequence::value_type>
1.188 +// and be implemented correspondingly, so that they can be used
1.189 +// for formatted output. For the sake of portability, we don't do this yet.
1.190 +//
1.191 +// For now, sequence buffers behave as output iterators. But they also
1.192 +// behave a little like basic_ostringstream<sequence::value_type> and a
1.193 +// little like containers.
1.194 +
1.195 +template<class _Sequence
1.196 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.197 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.198 +, size_t _Buf_sz = 100
1.199 +# if defined(__sgi) && !defined(__GNUC__)
1.200 +# define __TYPEDEF_WORKAROUND
1.201 +,class _V = typename _Sequence::value_type
1.202 +# endif /* __sgi */
1.203 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.204 +>
1.205 +// The 3rd parameter works around a common compiler bug.
1.206 +class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
1.207 +public:
1.208 +# ifndef __TYPEDEF_WORKAROUND
1.209 + typedef typename _Sequence::value_type value_type;
1.210 + typedef sequence_buffer<_Sequence
1.211 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.212 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.213 + , _Buf_sz
1.214 + > _Self;
1.215 +# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.216 + > _Self;
1.217 + enum { _Buf_sz = 100};
1.218 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.219 + // # endif
1.220 +# else /* __TYPEDEF_WORKAROUND */
1.221 + typedef _V value_type;
1.222 + typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
1.223 +# endif /* __TYPEDEF_WORKAROUND */
1.224 +protected:
1.225 + _Sequence* _M_prefix;
1.226 + value_type _M_buffer[_Buf_sz];
1.227 + size_t _M_buf_count;
1.228 +public:
1.229 + void flush() {
1.230 + _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
1.231 + _M_buf_count = 0;
1.232 + }
1.233 + ~sequence_buffer() { flush(); }
1.234 + sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
1.235 + sequence_buffer(const _Self& __x) {
1.236 + _M_prefix = __x._M_prefix;
1.237 + _M_buf_count = __x._M_buf_count;
1.238 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.239 + }
1.240 + sequence_buffer(_Self& __x) {
1.241 + __x.flush();
1.242 + _M_prefix = __x._M_prefix;
1.243 + _M_buf_count = 0;
1.244 + }
1.245 + sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
1.246 + _Self& operator= (_Self& __x) {
1.247 + __x.flush();
1.248 + _M_prefix = __x._M_prefix;
1.249 + _M_buf_count = 0;
1.250 + return *this;
1.251 + }
1.252 + _Self& operator= (const _Self& __x) {
1.253 + _M_prefix = __x._M_prefix;
1.254 + _M_buf_count = __x._M_buf_count;
1.255 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.256 + return *this;
1.257 + }
1.258 + void push_back(value_type __x)
1.259 + {
1.260 + if (_M_buf_count < _Buf_sz) {
1.261 + _M_buffer[_M_buf_count] = __x;
1.262 + ++_M_buf_count;
1.263 + } else {
1.264 + flush();
1.265 + _M_buffer[0] = __x;
1.266 + _M_buf_count = 1;
1.267 + }
1.268 + }
1.269 + void append(value_type* __s, size_t __len)
1.270 + {
1.271 + if (__len + _M_buf_count <= _Buf_sz) {
1.272 + size_t __i = _M_buf_count;
1.273 + size_t __j = 0;
1.274 + for (; __j < __len; __i++, __j++) {
1.275 + _M_buffer[__i] = __s[__j];
1.276 + }
1.277 + _M_buf_count += __len;
1.278 + } else if (0 == _M_buf_count) {
1.279 + _M_prefix->append(__s, __s + __len);
1.280 + } else {
1.281 + flush();
1.282 + append(__s, __len);
1.283 + }
1.284 + }
1.285 + _Self& write(value_type* __s, size_t __len)
1.286 + {
1.287 + append(__s, __len);
1.288 + return *this;
1.289 + }
1.290 + _Self& put(value_type __x)
1.291 + {
1.292 + push_back(__x);
1.293 + return *this;
1.294 + }
1.295 + _Self& operator=(const value_type& __rhs)
1.296 + {
1.297 + push_back(__rhs);
1.298 + return *this;
1.299 + }
1.300 + _Self& operator*() { return *this; }
1.301 + _Self& operator++() { return *this; }
1.302 + _Self& operator++(int) { return *this; }
1.303 +};
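+
+// Illustrative use of sequence_buffer (a sketch; it assumes rope's
+// append(const _CharT*, const _CharT*) and size() members declared later in
+// this header): single-character appends are batched, so the rope sees one
+// append per _Buf_sz characters rather than one per character.
+//
+//   rope<char> r;
+//   sequence_buffer<rope<char> > buf(r);
+//   for (int i = 0; i < 1000; ++i)
+//     buf.push_back((char)('a' + i % 26));
+//   buf.flush();             // the destructor would also flush
+//   // r.size() == 1000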
1.304 +
1.305 +// The following should be treated as private, at least for now.
1.306 +template<class _CharT>
1.307 +class _Rope_char_consumer {
1.308 +public:
1.309 + // If we had member templates, these should not be virtual.
1.310 + // For now we need to use run-time parametrization where
1.311 +  // compile-time would do. Hence this should all be private
1.312 + // for now.
1.313 + // The symmetry with char_producer is accidental and temporary.
1.314 + virtual ~_Rope_char_consumer() {};
1.315 + virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
1.316 +};
1.317 +
1.318 +//
1.319 +// What follows should really be local to rope. Unfortunately,
1.320 +// that doesn't work, since it makes it impossible to define generic
1.321 +// equality on rope iterators. According to the draft standard, the
1.322 +// template parameters for such an equality operator cannot be inferred
1.323 +// from the occurrence of a member class as a parameter.
1.324 +// (SGI compilers in fact allow this, but the result wouldn't be
1.325 +// portable.)
1.326 +// Similarly, some of the static member functions are member functions
1.327 +// only to avoid polluting the global namespace, and to circumvent
1.328 +// restrictions on type inference for template functions.
1.329 +//
1.330 +
1.331 +//
1.332 +// The internal data structure for representing a rope. This is
1.333 +// private to the implementation. A rope is really just a pointer
1.334 +// to one of these.
1.335 +//
1.336 +// A few basic functions for manipulating this data structure
1.337 +// are members of _RopeRep. Most of the more complex algorithms
1.338 +// are implemented as rope members.
1.339 +//
1.340 +// Some of the static member functions of _RopeRep have identically
1.341 +// named functions in rope that simply invoke the _RopeRep versions.
1.342 +//
1.343 +// A macro to introduce various allocation and deallocation functions.
1.344 +// These need to be defined differently depending on whether or not
1.345 +// we are using standard conforming allocators, and whether the allocator
1.346 +// instances have real state. Thus this macro is invoked repeatedly
1.347 +// with different definitions of __ROPE_DEFINE_ALLOC.
1.348 +
1.349 +#if defined (_STLP_MEMBER_TEMPLATE_CLASSES)
1.350 +# define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) \
1.351 + typedef typename \
1.352 + _Alloc_traits<_Tp,_Alloc>::allocator_type __name##Allocator;
1.353 +
1.354 +#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy) \
1.355 + __ROPE_DEFINE_ALLOC(_CharT,_Data, _M_proxy) /* character data */ \
1.356 + typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
1.357 + __ROPE_DEFINE_ALLOC(__C,_C, _M_proxy) \
1.358 + typedef _Rope_RopeLeaf<_CharT,__a> __L; \
1.359 + __ROPE_DEFINE_ALLOC(__L,_L, _M_proxy) \
1.360 + typedef _Rope_RopeFunction<_CharT,__a> __F; \
1.361 + __ROPE_DEFINE_ALLOC(__F,_F, _M_proxy) \
1.362 + typedef _Rope_RopeSubstring<_CharT,__a> __S; \
1.363 + __ROPE_DEFINE_ALLOC(__S,_S,_M_proxy)
1.364 +#else
1.365 +#define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy)
1.366 +#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy)
1.367 +#endif
1.368 +
1.369 +
1.370 +template<class _CharT, class _Alloc>
1.371 +struct _Rope_RopeRep
1.372 +# ifndef __GC
1.373 + : public _Refcount_Base
1.374 +# endif
1.375 +{
1.376 + typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
1.377 +public:
1.378 +# define __ROPE_MAX_DEPTH 45
1.379 +# define __ROPE_DEPTH_SIZE 46
1.380 + enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
1.381 + enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
1.382 + // Apparently needed by VC++
1.383 + // The data fields of leaves are allocated with some
1.384 +  // extra space, to accommodate future growth and for basic
1.385 + // character types, to hold a trailing eos character.
1.386 + enum { _S_alloc_granularity = 8 };
1.387 +
1.388 +
1.389 + _Tag _M_tag:8;
1.390 + bool _M_is_balanced:8;
1.391 +
1.392 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.393 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type
1.394 + allocator_type;
1.395 +
1.396 + allocator_type get_allocator() const { return allocator_type(_M_size); }
1.397 +
1.398 + unsigned char _M_depth;
1.399 + __GC_CONST _CharT* _M_c_string;
1.400 + _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
1.401 +
1.402 +# ifdef _STLP_NO_ARROW_OPERATOR
1.403 + _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
1.404 +# endif
1.405 +
1.406 +  /* _M_c_string above is the flattened version of */
1.407 +  /* the string, if needed; typically 0. */
1.408 +  /* If it's not 0, then the memory is owned */
1.409 +  /* by this node. */
1.410 +  /* In the case of a leaf, it may point to */
1.411 +  /* the same memory as the data field. */
1.412 + _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t _p_size,
1.413 + allocator_type __a) :
1.414 +# ifndef __GC
1.415 + _Refcount_Base(1),
1.416 +# endif
1.417 + _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
1.418 + { }
1.419 +# ifdef __GC
1.420 + void _M_incr () {}
1.421 +# endif
1.422 +
1.423 + // fbp : moved from RopeLeaf
1.424 + static size_t _S_rounded_up_size(size_t __n) {
1.425 + size_t __size_with_eos;
1.426 +
1.427 + if (_S_is_basic_char_type((_CharT*)0)) {
1.428 + __size_with_eos = __n + 1;
1.429 + } else {
1.430 + __size_with_eos = __n;
1.431 + }
1.432 +# ifdef __GC
1.433 + return __size_with_eos;
1.434 +# else
1.435 + // Allow slop for in-place expansion.
1.436 + return (__size_with_eos + _S_alloc_granularity-1)
1.437 + &~ (_S_alloc_granularity-1);
1.438 +# endif
1.439 + }
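+  // Worked example for _S_rounded_up_size above: with char (a basic
+  // character type) and the granularity of 8, __n == 10 gives
+  // __size_with_eos == 11, which rounds up to a 16-character allocation --
+  // one slot for the eos plus slop for in-place growth.  For a non-basic
+  // element type, __n == 10 also rounds up to 16, but no eos slot is
+  // reserved.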
1.440 +
1.441 + static void _S_free_string(__GC_CONST _CharT* __s, size_t __len,
1.442 + allocator_type __a) {
1.443 +
1.444 + if (!_S_is_basic_char_type((_CharT*)0)) {
1.445 + _STLP_STD::_Destroy(__s, __s + __len);
1.446 + }
1.447 + // This has to be a static member, so this gets a bit messy
1.448 +# ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
1.449 + __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
1.450 +# else
1.451 + __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
1.452 +# endif
1.453 + }
1.454 +
1.455 + // Deallocate data section of a leaf.
1.456 + // This shouldn't be a member function.
1.457 +  // But it's hard to do anything else at the
1.458 + // moment, because it's templatized w.r.t.
1.459 + // an allocator.
1.460 + // Does nothing if __GC is defined.
1.461 +# ifndef __GC
1.462 + void _M_free_c_string();
1.463 + void _M_free_tree();
1.464 + // Deallocate t. Assumes t is not 0.
1.465 + void _M_unref_nonnil()
1.466 + {
1.467 + _M_decr(); if (!_M_ref_count) _M_free_tree();
1.468 + }
1.469 + void _M_ref_nonnil()
1.470 + {
1.471 + _M_incr();
1.472 + }
1.473 + static void _S_unref(_Self* __t)
1.474 + {
1.475 + if (0 != __t) {
1.476 + __t->_M_unref_nonnil();
1.477 + }
1.478 + }
1.479 + static void _S_ref(_Self* __t)
1.480 + {
1.481 + if (0 != __t) __t->_M_incr();
1.482 + }
1.483 + static void _S_free_if_unref(_Self* __t)
1.484 + {
1.485 + if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
1.486 + }
1.487 +# else /* __GC */
1.488 + void _M_unref_nonnil() {}
1.489 + void _M_ref_nonnil() {}
1.490 + static void _S_unref(_Self*) {}
1.491 + static void _S_ref(_Self*) {}
1.492 + static void _S_free_if_unref(_Self*) {}
1.493 +# endif
1.494 +
1.495 + __ROPE_DEFINE_ALLOCS(_Alloc, _M_size)
1.496 + };
1.497 +
1.498 +template<class _CharT, class _Alloc>
1.499 +struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
1.500 +public:
1.501 + __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
1.502 + /* The allocated size is */
1.503 + /* _S_rounded_up_size(size), except */
1.504 + /* in the GC case, in which it */
1.505 + /* doesn't matter. */
1.506 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.507 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.508 + _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t _p_size, allocator_type __a)
1.509 + : _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_leaf, 0, true, _p_size, __a),
1.510 + _M_data(__d)
1.511 + {
1.512 + _STLP_ASSERT(_p_size > 0)
1.513 + if (_S_is_basic_char_type((_CharT *)0)) {
1.514 + // already eos terminated.
1.515 + this->_M_c_string = __d;
1.516 + }
1.517 + }
1.518 +
1.519 +# ifdef _STLP_NO_ARROW_OPERATOR
1.520 + _Rope_RopeLeaf() {}
1.521 + _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
1.522 +# endif
1.523 +
1.524 +  // The constructor assumes that d has been allocated with
1.525 + // the proper allocator and the properly padded size.
1.526 + // In contrast, the destructor deallocates the data:
1.527 +# ifndef __GC
1.528 + ~_Rope_RopeLeaf() {
1.529 + if (_M_data != this->_M_c_string) {
1.530 + this->_M_free_c_string();
1.531 + }
1.532 + _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
1.533 + }
1.534 +# endif
1.535 +};
1.536 +
1.537 +template<class _CharT, class _Alloc>
1.538 +struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
1.539 +public:
1.540 + _Rope_RopeRep<_CharT,_Alloc>* _M_left;
1.541 + _Rope_RopeRep<_CharT,_Alloc>* _M_right;
1.542 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.543 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.544 + _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
1.545 + _Rope_RopeRep<_CharT,_Alloc>* __r,
1.546 + allocator_type __a)
1.547 + : _Rope_RopeRep<_CharT,_Alloc>(
1.548 + _Rope_RopeRep<_CharT,_Alloc>::_S_concat,
1.549 + (max)(__l->_M_depth, __r->_M_depth) + 1, false,
1.550 + __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
1.551 + {}
1.552 +# ifdef _STLP_NO_ARROW_OPERATOR
1.553 + _Rope_RopeConcatenation() {}
1.554 + _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
1.555 +# endif
1.556 +
1.557 +# ifndef __GC
1.558 + ~_Rope_RopeConcatenation() {
1.559 + this->_M_free_c_string();
1.560 + _M_left->_M_unref_nonnil();
1.561 + _M_right->_M_unref_nonnil();
1.562 + }
1.563 +# endif
1.564 +};
1.565 +
1.566 +template<class _CharT, class _Alloc>
1.567 +struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
1.568 +public:
1.569 + char_producer<_CharT>* _M_fn;
1.570 +# ifndef __GC
1.571 + bool _M_delete_when_done; // Char_producer is owned by the
1.572 + // rope and should be explicitly
1.573 + // deleted when the rope becomes
1.574 + // inaccessible.
1.575 +# else
1.576 + // In the GC case, we either register the rope for
1.577 + // finalization, or not. Thus the field is unnecessary;
1.578 + // the information is stored in the collector data structures.
1.579 + // We do need a finalization procedure to be invoked by the
1.580 + // collector.
1.581 + static void _S_fn_finalization_proc(void * __tree, void *) {
1.582 + delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
1.583 + }
1.584 +# endif
1.585 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.586 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.587 +# ifdef _STLP_NO_ARROW_OPERATOR
1.588 + _Rope_RopeFunction() {}
1.589 + _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
1.590 +# endif
1.591 +
1.592 + _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
1.593 + bool __d, allocator_type __a)
1.594 + :
1.595 + _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_function, 0, true, _p_size, __a),
1.596 + _M_fn(__f)
1.597 +# ifndef __GC
1.598 + , _M_delete_when_done(__d)
1.599 +# endif
1.600 + {
1.601 + _STLP_ASSERT(_p_size > 0)
1.602 +# ifdef __GC
1.603 + if (__d) {
1.604 + GC_REGISTER_FINALIZER(
1.605 + this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
1.606 + }
1.607 +# endif
1.608 + }
1.609 +# ifndef __GC
1.610 + ~_Rope_RopeFunction() {
1.611 + this->_M_free_c_string();
1.612 + if (_M_delete_when_done) {
1.613 + delete _M_fn;
1.614 + }
1.615 + }
1.616 +# endif
1.617 +};
1.618 +// Substring results are usually represented using just
1.619 +// concatenation nodes. But in the case of very long flat ropes
1.620 +// or ropes with a functional representation that isn't practical.
1.621 +// In that case, we represent the result as a special case of
1.622 +// RopeFunction, whose char_producer points back to the rope itself.
1.623 +// In all cases except repeated substring operations and
1.624 +// deallocation, we treat the result as a RopeFunction.
1.625 +template<class _CharT, class _Alloc>
1.626 +# if ( defined (__IBMCPP__) && (__IBMCPP__ == 500) ) // JFA 10-Aug-2000 for some reason xlC cares about the order
1.627 +struct _Rope_RopeSubstring : public char_producer<_CharT> , public _Rope_RopeFunction<_CharT,_Alloc>
1.628 +# else
1.629 +struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
1.630 + public char_producer<_CharT>
1.631 +# endif
1.632 +{
1.633 +public:
1.634 + // XXX this whole class should be rewritten.
1.635 + typedef _Rope_RopeRep<_CharT,_Alloc> _Base;
1.636 + _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
1.637 + size_t _M_start;
1.638 + virtual void operator()(size_t __start_pos, size_t __req_len,
1.639 + _CharT* __buffer) {
1.640 + switch(_M_base->_M_tag) {
1.641 + case _Base::_S_function:
1.642 + case _Base::_S_substringfn:
1.643 + {
1.644 + char_producer<_CharT>* __fn =
1.645 + ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
1.646 + _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
1.647 + _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
1.648 + (*__fn)(__start_pos + _M_start, __req_len, __buffer);
1.649 + }
1.650 + break;
1.651 + case _Base::_S_leaf:
1.652 + {
1.653 + __GC_CONST _CharT* __s =
1.654 + ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
1.655 + uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
1.656 + __buffer);
1.657 + }
1.658 + break;
1.659 + default:
1.660 + _STLP_ASSERT(false)
1.661 + ;
1.662 + }
1.663 + }
1.664 +
1.665 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.666 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.667 +
1.668 + _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1.669 + size_t __l, allocator_type __a)
1.670 + : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
1.671 + _M_base(__b),
1.672 + _M_start(__s)
1.673 +
1.674 + {
1.675 + _STLP_ASSERT(__l > 0)
1.676 + _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
1.677 +# ifndef __GC
1.678 + _M_base->_M_ref_nonnil();
1.679 +# endif
1.680 + this->_M_tag = _Base::_S_substringfn;
1.681 + }
1.682 + virtual ~_Rope_RopeSubstring()
1.683 + {
1.684 +# ifndef __GC
1.685 + _M_base->_M_unref_nonnil();
1.686 +# endif
1.687 + }
1.688 +};
1.689 +
1.690 +// Self-destructing pointers to Rope_rep.
1.691 +// These are not conventional smart pointers. Their
1.692 +// only purpose in life is to ensure that unref is called
1.693 +// on the pointer either at normal exit or if an exception
1.694 +// is raised. It is the caller's responsibility to
1.695 +// adjust reference counts when these pointers are initialized
1.696 +// or assigned to. (This convention significantly reduces
1.697 +// the number of potentially expensive reference count
1.698 +// updates.)
1.699 +#ifndef __GC
1.700 +template<class _CharT, class _Alloc>
1.701 +struct _Rope_self_destruct_ptr {
1.702 + _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
1.703 + ~_Rope_self_destruct_ptr()
1.704 + { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
1.705 +# ifdef _STLP_USE_EXCEPTIONS
1.706 + _Rope_self_destruct_ptr() : _M_ptr(0) {};
1.707 +# else
1.708 + _Rope_self_destruct_ptr() {};
1.709 +# endif
1.710 + _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
1.711 + _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
1.712 + _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
1.713 + operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
1.714 + _Rope_self_destruct_ptr<_CharT, _Alloc>&
1.715 + operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
1.716 + { _M_ptr = __x; return *this; }
1.717 +};
1.718 +#endif
1.719 +
1.720 +// Dereferencing a nonconst iterator has to return something
1.721 +// that behaves almost like a reference. It's not possible to
1.722 +// return an actual reference since assignment requires extra
1.723 +// work. And we would get into the same problems as with the
1.724 +// CD2 version of basic_string.
1.725 +template<class _CharT, class _Alloc>
1.726 +class _Rope_char_ref_proxy {
1.727 + typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
1.728 + friend class rope<_CharT,_Alloc>;
1.729 + friend class _Rope_iterator<_CharT,_Alloc>;
1.730 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.731 +# ifdef __GC
1.732 + typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1.733 +# else
1.734 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.735 +# endif
1.736 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.737 + typedef rope<_CharT,_Alloc> _My_rope;
1.738 + size_t _M_pos;
1.739 + _CharT _M_current;
1.740 + bool _M_current_valid;
1.741 + _My_rope* _M_root; // The whole rope.
1.742 +public:
1.743 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
1.744 + _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
1.745 + _Rope_char_ref_proxy(const _Self& __x) :
1.746 + _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
1.747 + // Don't preserve cache if the reference can outlive the
1.748 + // expression. We claim that's not possible without calling
1.749 +  // a copy constructor or generating a reference to a proxy
1.750 + // reference. We declare the latter to have undefined semantics.
1.751 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
1.752 + _CharT __c) :
1.753 + _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
1.754 + inline operator _CharT () const;
1.755 + _Self& operator= (_CharT __c);
1.756 + _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
1.757 + _Self& operator= (const _Self& __c) {
1.758 + return operator=((_CharT)__c);
1.759 + }
1.760 +};
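+
+// A sketch of what the proxy means in practice (illustrative; it assumes
+// rope's mutable_begin() declared later in this header): dereferencing a
+// non-const rope iterator yields a _Rope_char_ref_proxy rather than a
+// _CharT&, so writes go through the proxy's operator= and reads go through
+// its conversion to _CharT.
+//
+//   rope<char> r("abc");
+//   rope<char>::iterator i = r.mutable_begin();
+//   *i = 'A';                       // proxy assignment; may copy tree nodes
+//   char c = *r.mutable_begin();    // proxy converts back to a plain char
+//   // now r == "Abc" and c == 'A'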
1.761 +
1.762 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
1.763 +template<class _CharT, class __Alloc>
1.764 +inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
1.765 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
1.766 + _CharT __tmp = __a;
1.767 + __a = __b;
1.768 + __b = __tmp;
1.769 +}
1.770 +#else
1.771 +// There is no really acceptable way to handle this. The default
1.772 +// definition of swap doesn't work for proxy references.
1.773 +// It can't really be made to work, even with ugly hacks, since
1.774 +// the only unusual operation it uses is the copy constructor, which
1.775 +// is needed for other purposes. We provide a macro for
1.776 +// full specializations, and instantiate the most common case.
1.777 +# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
1.778 + inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
1.779 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
1.780 + _CharT __tmp = __a; \
1.781 + __a = __b; \
1.782 + __b = __tmp; \
1.783 + }
1.784 +
1.785 +_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
1.786 +
1.787 +#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
1.788 +
1.789 + template<class _CharT, class _Alloc>
1.790 +class _Rope_char_ptr_proxy {
1.791 + // XXX this class should be rewritten.
1.792 +public:
1.793 + typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
1.794 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.795 + size_t _M_pos;
1.796 + rope<_CharT,_Alloc>* _M_root; // The whole rope.
1.797 +
1.798 + _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
1.799 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.800 + _Rope_char_ptr_proxy(const _Self& __x)
1.801 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.802 + _Rope_char_ptr_proxy() {}
1.803 + _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
1.804 + _STLP_ASSERT(0 == __x)
1.805 + }
1.806 + _Self&
1.807 + operator= (const _Self& __x) {
1.808 + _M_pos = __x._M_pos;
1.809 + _M_root = __x._M_root;
1.810 + return *this;
1.811 + }
1.812 +
1.813 + _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
1.814 + return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
1.815 + }
1.816 +};
1.817 +
1.818 +
1.819 +// Rope iterators:
1.820 +// Unlike in the C version, we cache only part of the stack
1.821 +// for rope iterators, since they must be efficiently copyable.
1.822 +// When we run out of cache, we have to reconstruct the iterator
1.823 +// value.
1.824 +// Pointers from iterators are not included in reference counts.
1.825 +// Iterators are assumed to be thread private. Ropes can
1.826 +// be shared.
1.827 +
1.828 +template<class _CharT, class _Alloc>
1.829 +class _Rope_iterator_base
1.830 +/* : public random_access_iterator<_CharT, ptrdiff_t> */
1.831 +{
1.832 + friend class rope<_CharT,_Alloc>;
1.833 + typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
1.834 +public:
1.835 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.836 +  // Borland doesn't want this to be protected.
1.837 + // protected:
1.838 + enum { _S_path_cache_len = 4 }; // Must be <= 9.
1.839 + enum { _S_iterator_buf_len = 15 };
1.840 + size_t _M_current_pos;
1.841 + _RopeRep* _M_root; // The whole rope.
1.842 + size_t _M_leaf_pos; // Starting position for current leaf
1.843 + __GC_CONST _CharT* _M_buf_start;
1.844 + // Buffer possibly
1.845 + // containing current char.
1.846 + __GC_CONST _CharT* _M_buf_ptr;
1.847 + // Pointer to current char in buffer.
1.848 + // != 0 ==> buffer valid.
1.849 + __GC_CONST _CharT* _M_buf_end;
1.850 +  // One past the last valid char in buffer.
1.851 + // What follows is the path cache. We go out of our
1.852 + // way to make this compact.
1.853 + // Path_end contains the bottom section of the path from
1.854 + // the root to the current leaf.
1.855 + const _RopeRep* _M_path_end[_S_path_cache_len];
1.856 +  int _M_leaf_index; // Last valid position in path_end;
1.857 + // _M_path_end[0] ... _M_path_end[leaf_index-1]
1.858 + // point to concatenation nodes.
1.859 + unsigned char _M_path_directions;
1.860 + // (path_directions >> __i) & 1 is 1
1.861 + // iff we got from _M_path_end[leaf_index - __i - 1]
1.862 + // to _M_path_end[leaf_index - __i] by going to the
1.863 +  // right. Assumes path_cache_len <= 9.
1.864 + _CharT _M_tmp_buf[_S_iterator_buf_len];
1.865 + // Short buffer for surrounding chars.
1.866 + // This is useful primarily for
1.867 + // RopeFunctions. We put the buffer
1.868 + // here to avoid locking in the
1.869 + // multithreaded case.
1.870 + // The cached path is generally assumed to be valid
1.871 + // only if the buffer is valid.
1.872 + static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.873 + // Set buffer contents given
1.874 + // path cache.
1.875 + static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.876 + // Set buffer contents and
1.877 + // path cache.
1.878 + static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.879 + // As above, but assumes path
1.880 + // cache is valid for previous posn.
1.881 + _Rope_iterator_base() {}
1.882 + _Rope_iterator_base(_RopeRep* __root, size_t __pos)
1.883 + : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
1.884 + void _M_incr(size_t __n);
1.885 + void _M_decr(size_t __n);
1.886 +public:
1.887 + size_t index() const { return _M_current_pos; }
1.888 + _Rope_iterator_base(const _Self& __x) {
1.889 + if (0 != __x._M_buf_ptr) {
1.890 + *this = __x;
1.891 + } else {
1.892 + _M_current_pos = __x._M_current_pos;
1.893 + _M_root = __x._M_root;
1.894 + _M_buf_ptr = 0;
1.895 + }
1.896 + }
1.897 +};
1.898 +
1.899 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.900 +
1.901 +template<class _CharT, class _Alloc>
1.902 +class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.903 + friend class rope<_CharT,_Alloc>;
1.904 + typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
1.905 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.906 + // protected:
1.907 +public:
1.908 +# ifndef _STLP_HAS_NO_NAMESPACES
1.909 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.910 + // The one from the base class may not be directly visible.
1.911 +# endif
1.912 + _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
1.913 + _Rope_iterator_base<_CharT,_Alloc>(
1.914 + __CONST_CAST(_RopeRep*,__root), __pos)
1.915 + // Only nonconst iterators modify root ref count
1.916 + {}
1.917 +public:
1.918 + typedef _CharT reference; // Really a value. Returning a reference
1.919 + // would be a mess, since it would have
1.920 + // to be included in refcount.
1.921 + typedef const _CharT* pointer;
1.922 + typedef _CharT value_type;
1.923 + typedef ptrdiff_t difference_type;
1.924 + typedef random_access_iterator_tag iterator_category;
1.925 +
1.926 +public:
1.927 + _Rope_const_iterator() {};
1.928 + _Rope_const_iterator(const _Self& __x) :
1.929 + _Rope_iterator_base<_CharT,_Alloc>(__x) { }
1.930 + _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
1.931 + _Rope_iterator_base<_CharT,_Alloc>(__x) {}
1.932 + _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
1.933 + _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
1.934 + _Self& operator= (const _Self& __x) {
1.935 + if (0 != __x._M_buf_ptr) {
1.936 + *(__STATIC_CAST(_Base*,this)) = __x;
1.937 + } else {
1.938 + this->_M_current_pos = __x._M_current_pos;
1.939 + this->_M_root = __x._M_root;
1.940 + this->_M_buf_ptr = 0;
1.941 + }
1.942 + return(*this);
1.943 + }
1.944 + reference operator*() {
1.945 + if (0 == this->_M_buf_ptr) _S_setcache(*this);
1.946 + return *(this->_M_buf_ptr);
1.947 + }
1.948 + _Self& operator++() {
1.949 + __GC_CONST _CharT* __next;
1.950 + if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
1.951 + this->_M_buf_ptr = __next;
1.952 + ++this->_M_current_pos;
1.953 + } else {
1.954 + this->_M_incr(1);
1.955 + }
1.956 + return *this;
1.957 + }
1.958 + _Self& operator+=(ptrdiff_t __n) {
1.959 + if (__n >= 0) {
1.960 + this->_M_incr(__n);
1.961 + } else {
1.962 + this->_M_decr(-__n);
1.963 + }
1.964 + return *this;
1.965 + }
1.966 + _Self& operator--() {
1.967 + this->_M_decr(1);
1.968 + return *this;
1.969 + }
1.970 + _Self& operator-=(ptrdiff_t __n) {
1.971 + if (__n >= 0) {
1.972 + this->_M_decr(__n);
1.973 + } else {
1.974 + this->_M_incr(-__n);
1.975 + }
1.976 + return *this;
1.977 + }
1.978 + _Self operator++(int) {
1.979 + size_t __old_pos = this->_M_current_pos;
1.980 + this->_M_incr(1);
1.981 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.982 + // This makes a subsequent dereference expensive.
1.983 + // Perhaps we should instead copy the iterator
1.984 + // if it has a valid cache?
1.985 + }
1.986 + _Self operator--(int) {
1.987 + size_t __old_pos = this->_M_current_pos;
1.988 + this->_M_decr(1);
1.989 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.990 + }
1.991 + inline reference operator[](size_t __n);
1.992 +};
1.993 +
1.994 +template<class _CharT, class _Alloc>
1.995 +class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.996 + friend class rope<_CharT,_Alloc>;
1.997 + typedef _Rope_iterator<_CharT, _Alloc> _Self;
1.998 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.999 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.1000 + // protected:
1.1001 +public:
1.1002 + rope<_CharT,_Alloc>* _M_root_rope;
1.1003 + // root is treated as a cached version of this,
1.1004 + // and is used to detect changes to the underlying
1.1005 + // rope.
1.1006 + // Root is included in the reference count.
1.1007 + // This is necessary so that we can detect changes reliably.
1.1008 + // Unfortunately, it requires careful bookkeeping for the
1.1009 + // nonGC case.
1.1010 + _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
1.1011 +
1.1012 + void _M_check();
1.1013 +public:
1.1014 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.1015 + typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1.1016 + typedef _CharT value_type;
1.1017 + typedef ptrdiff_t difference_type;
1.1018 + typedef random_access_iterator_tag iterator_category;
1.1019 +public:
1.1020 + ~_Rope_iterator() //*TY 5/6/00 - added dtor to balance reference count
1.1021 + {
1.1022 + _RopeRep::_S_unref(this->_M_root);
1.1023 + }
1.1024 +
1.1025 + rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1.1026 + _Rope_iterator() {
1.1027 + this->_M_root = 0; // Needed for reference counting.
1.1028 + };
1.1029 + _Rope_iterator(const _Self& __x) :
1.1030 + _Rope_iterator_base<_CharT,_Alloc>(__x) {
1.1031 + _M_root_rope = __x._M_root_rope;
1.1032 + _RopeRep::_S_ref(this->_M_root);
1.1033 + }
1.1034 + _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1.1035 + _Self& operator= (const _Self& __x) {
1.1036 + _RopeRep* __old = this->_M_root;
1.1037 +
1.1038 + _RopeRep::_S_ref(__x._M_root);
1.1039 + if (0 != __x._M_buf_ptr) {
1.1040 + _M_root_rope = __x._M_root_rope;
1.1041 + *(__STATIC_CAST(_Base*,this)) = __x;
1.1042 + } else {
1.1043 + this->_M_current_pos = __x._M_current_pos;
1.1044 + this->_M_root = __x._M_root;
1.1045 + _M_root_rope = __x._M_root_rope;
1.1046 + this->_M_buf_ptr = 0;
1.1047 + }
1.1048 + _RopeRep::_S_unref(__old);
1.1049 + return(*this);
1.1050 + }
1.1051 + reference operator*() {
1.1052 + _M_check();
1.1053 + if (0 == this->_M_buf_ptr) {
1.1054 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1055 + _M_root_rope, this->_M_current_pos);
1.1056 + } else {
1.1057 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1058 + _M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
1.1059 + }
1.1060 + }
1.1061 + _Self& operator++() {
1.1062 + this->_M_incr(1);
1.1063 + return *this;
1.1064 + }
1.1065 + _Self& operator+=(ptrdiff_t __n) {
1.1066 + if (__n >= 0) {
1.1067 + this->_M_incr(__n);
1.1068 + } else {
1.1069 + this->_M_decr(-__n);
1.1070 + }
1.1071 + return *this;
1.1072 + }
1.1073 + _Self& operator--() {
1.1074 + this->_M_decr(1);
1.1075 + return *this;
1.1076 + }
1.1077 + _Self& operator-=(ptrdiff_t __n) {
1.1078 + if (__n >= 0) {
1.1079 + this->_M_decr(__n);
1.1080 + } else {
1.1081 + this->_M_incr(-__n);
1.1082 + }
1.1083 + return *this;
1.1084 + }
1.1085 + _Self operator++(int) {
1.1086 + size_t __old_pos = this->_M_current_pos;
1.1087 + this->_M_incr(1);
1.1088 + return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1.1089 + }
1.1090 + _Self operator--(int) {
1.1091 + size_t __old_pos = this->_M_current_pos;
1.1092 + this->_M_decr(1);
1.1093 + return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1.1094 + }
1.1095 + reference operator[](ptrdiff_t __n) {
1.1096 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1097 + _M_root_rope, this->_M_current_pos + __n);
1.1098 + }
1.1099 +};
1.1100 +
1.1101 +# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1.1102 +template <class _CharT, class _Alloc>
1.1103 +inline random_access_iterator_tag
1.1104 +iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
1.1105 +template <class _CharT, class _Alloc>
1.1106 +inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1107 +template <class _CharT, class _Alloc>
1.1108 +inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1109 +template <class _CharT, class _Alloc>
1.1110 +inline random_access_iterator_tag
1.1111 +iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
1.1112 +template <class _CharT, class _Alloc>
1.1113 +inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1114 +template <class _CharT, class _Alloc>
1.1115 +inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1116 +#endif
1.1117 +
1.1118 +template <class _CharT, class _Alloc>
1.1119 +class rope {
1.1120 + typedef rope<_CharT,_Alloc> _Self;
1.1121 +public:
1.1122 + typedef _CharT value_type;
1.1123 + typedef ptrdiff_t difference_type;
1.1124 + typedef size_t size_type;
1.1125 + typedef _CharT const_reference;
1.1126 + typedef const _CharT* const_pointer;
1.1127 + typedef _Rope_iterator<_CharT,_Alloc> iterator;
1.1128 + typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1.1129 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.1130 + typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1.1131 +
1.1132 + friend class _Rope_iterator<_CharT,_Alloc>;
1.1133 + friend class _Rope_const_iterator<_CharT,_Alloc>;
1.1134 + friend struct _Rope_RopeRep<_CharT,_Alloc>;
1.1135 + friend class _Rope_iterator_base<_CharT,_Alloc>;
1.1136 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.1137 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.1138 + friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1.1139 +
1.1140 + _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1.1141 +
1.1142 +protected:
1.1143 + typedef __GC_CONST _CharT* _Cstrptr;
1.1144 +
1.1145 + static _CharT _S_empty_c_str[1];
1.1146 +
1.1147 + static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1.1148 + enum { _S_copy_max = 23 };
1.1149 + // For strings shorter than _S_copy_max, we copy to
1.1150 + // concatenate.
1.1151 +
1.1152 +public:
1.1153 + typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1.1154 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.1155 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1.1156 + allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
1.1157 +public:
1.1158 + // The only data member of a rope:
1.1159 + _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
1.1160 +
1.1161 + typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1.1162 + typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1.1163 + typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1.1164 + typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1.1165 +
1.1166 +
1.1167 +
1.1168 + // Retrieve a character at the indicated position.
1.1169 + static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1.1170 +
1.1171 +# ifndef __GC
1.1172 + // Obtain a pointer to the character at the indicated position.
1.1173 + // The pointer can be used to change the character.
1.1174 + // If such a pointer cannot be produced, as is frequently the
1.1175 + // case, 0 is returned instead.
1.1176 + // (Returns nonzero only if all nodes in the path have a refcount
1.1177 + // of 1.)
1.1178 + static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1.1179 +# endif
1.1180 +
1.1181 + static bool _S_apply_to_pieces(
1.1182 + // should be template parameter
1.1183 + _Rope_char_consumer<_CharT>& __c,
1.1184 + const _RopeRep* __r,
1.1185 + size_t __begin, size_t __end);
1.1186 + // begin and end are assumed to be in range.
1.1187 +
1.1188 +# ifndef __GC
1.1189 + static void _S_unref(_RopeRep* __t)
1.1190 + {
1.1191 + _RopeRep::_S_unref(__t);
1.1192 + }
1.1193 + static void _S_ref(_RopeRep* __t)
1.1194 + {
1.1195 + _RopeRep::_S_ref(__t);
1.1196 + }
1.1197 +# else /* __GC */
1.1198 + static void _S_unref(_RopeRep*) {}
1.1199 + static void _S_ref(_RopeRep*) {}
1.1200 +# endif
1.1201 +
1.1202 +
1.1203 +# ifdef __GC
1.1204 + typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1.1205 +# else
1.1206 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.1207 +# endif
1.1208 +
1.1209 +  // Result is counted in refcount.
1.1210 + static _RopeRep* _S_substring(_RopeRep* __base,
1.1211 + size_t __start, size_t __endp1);
1.1212 +
1.1213 + static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1.1214 + const _CharT* __iter, size_t __slen);
1.1215 + // Concatenate rope and char ptr, copying __s.
1.1216 + // Should really take an arbitrary iterator.
1.1217 + // Result is counted in refcount.
1.1218 + static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1.1219 + const _CharT* __iter, size_t __slen)
1.1220 + // As above, but one reference to __r is about to be
1.1221 + // destroyed. Thus the pieces may be recycled if all
1.1222 +  // relevant reference counts are 1.
1.1223 +# ifdef __GC
1.1224 + // We can't really do anything since refcounts are unavailable.
1.1225 + { return _S_concat_char_iter(__r, __iter, __slen); }
1.1226 +# else
1.1227 + ;
1.1228 +# endif
1.1229 +
1.1230 + static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
1.1231 +  // General concatenation on _RopeRep. Result
1.1232 + // has refcount of 1. Adjusts argument refcounts.
1.1233 +
1.1234 +public:
1.1235 + void apply_to_pieces( size_t __begin, size_t __end,
1.1236 + _Rope_char_consumer<_CharT>& __c) const {
1.1237 + _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end);
1.1238 + }
1.1239 +
1.1240 +
1.1241 +protected:
1.1242 +
1.1243 + static size_t _S_rounded_up_size(size_t __n) {
1.1244 + return _RopeRep::_S_rounded_up_size(__n);
1.1245 + }
1.1246 +
1.1247 + static size_t _S_allocated_capacity(size_t __n) {
1.1248 + if (_S_is_basic_char_type((_CharT*)0)) {
1.1249 + return _S_rounded_up_size(__n) - 1;
1.1250 + } else {
1.1251 + return _S_rounded_up_size(__n);
1.1252 + }
1.1253 + }
1.1254 +
1.1255 + // Allocate and construct a RopeLeaf using the supplied allocator
1.1256 + // Takes ownership of s instead of copying.
1.1257 + static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1.1258 + size_t _p_size, allocator_type __a)
1.1259 + {
1.1260 + _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _RopeLeaf).allocate(1,(const void*)0);
1.1261 + _STLP_TRY {
1.1262 + _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1.1263 + }
1.1264 + _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1265 + _RopeLeaf).deallocate(__space, 1))
1.1266 + return __space;
1.1267 + }
1.1268 +
1.1269 + static _RopeConcatenation* _S_new_RopeConcatenation(
1.1270 + _RopeRep* __left, _RopeRep* __right,
1.1271 + allocator_type __a)
1.1272 + {
1.1273 + _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1274 + _RopeConcatenation).allocate(1,(const void*)0);
1.1275 + return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
1.1276 + }
1.1277 +
1.1278 + static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1.1279 + size_t _p_size, bool __d, allocator_type __a)
1.1280 + {
1.1281 + _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1282 + _RopeFunction).allocate(1,(const void*)0);
1.1283 + return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
1.1284 + }
1.1285 +
1.1286 + static _RopeSubstring* _S_new_RopeSubstring(
1.1287 + _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1.1288 + size_t __l, allocator_type __a)
1.1289 + {
1.1290 + _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1291 + _RopeSubstring).allocate(1,(const void*)0);
1.1292 + return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
1.1293 + }
1.1294 +
1.1295 +# define _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _p_size, __a) \
1.1296 + _S_RopeLeaf_from_unowned_char_ptr(__s, _p_size, __a)
1.1297 +
1.1298 + static
1.1299 + _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1.1300 + size_t _p_size, allocator_type __a)
1.1301 + {
1.1302 + if (0 == _p_size) return 0;
1.1303 +
1.1304 + _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1.1305 +
1.1306 + uninitialized_copy_n(__s, _p_size, __buf);
1.1307 + _S_cond_store_eos(__buf[_p_size]);
1.1308 +
1.1309 + _STLP_TRY {
1.1310 + return _S_new_RopeLeaf(__buf, _p_size, __a);
1.1311 + }
1.1312 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1.1313 +
1.1314 +# if defined (_STLP_THROW_RETURN_BUG)
1.1315 + return 0;
1.1316 +# endif
1.1317 + }
1.1318 +
1.1319 +
1.1320 + // Concatenation of nonempty strings.
1.1321 + // Always builds a concatenation node.
1.1322 + // Rebalances if the result is too deep.
1.1323 + // Result has refcount 1.
1.1324 + // Does not increment left and right ref counts even though
1.1325 + // they are referenced.
1.1326 + static _RopeRep*
1.1327 + _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1.1328 +
1.1329 + // Concatenation helper functions
1.1330 + static _RopeLeaf*
1.1331 + _S_leaf_concat_char_iter(_RopeLeaf* __r,
1.1332 + const _CharT* __iter, size_t __slen);
1.1333 + // Concatenate by copying leaf.
1.1334 + // should take an arbitrary iterator
1.1335 + // result has refcount 1.
1.1336 +# ifndef __GC
1.1337 + static _RopeLeaf* _S_destr_leaf_concat_char_iter
1.1338 + (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1.1339 + // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1.1340 +# endif
1.1341 +
1.1342 +
1.1343 + // A helper function for exponentiating strings.
1.1344 + // This uses a nonstandard refcount convention.
1.1345 + // The result has refcount 0.
1.1346 + friend struct _Rope_Concat_fn<_CharT,_Alloc>;
1.1347 + typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1348 +
1.1349 +public:
1.1350 + static size_t _S_char_ptr_len(const _CharT* __s) {
1.1351 + const _CharT* __p = __s;
1.1352 +
1.1353 + while (!_S_is0(*__p)) { ++__p; }
1.1354 + return (__p - __s);
1.1355 + }
1.1356 +
1.1357 +public: /* for operators */
1.1358 + rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1.1359 + : _M_tree_ptr(__a, __t) { }
1.1360 +private:
1.1361 + // Copy __r to the _CharT buffer.
1.1362 + // Returns __buffer + __r->_M_size._M_data.
1.1363 + // Assumes that buffer is uninitialized.
1.1364 + static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1.1365 +
1.1366 + // Again, with explicit starting position and length.
1.1367 + // Assumes that buffer is uninitialized.
1.1368 + static _CharT* _S_flatten(_RopeRep* __r,
1.1369 + size_t __start, size_t __len,
1.1370 + _CharT* __buffer);
1.1371 +
1.1372 + // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
1.1373 +public:
1.1374 + static const unsigned long _S_min_len[46];
1.1375 +protected:
1.1376 + static bool _S_is_balanced(_RopeRep* __r)
1.1377 + { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1.1378 +
1.1379 + static bool _S_is_almost_balanced(_RopeRep* __r)
1.1380 + { return (__r->_M_depth == 0 ||
1.1381 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]); }
1.1382 +
1.1383 + static bool _S_is_roughly_balanced(_RopeRep* __r)
1.1384 + { return (__r->_M_depth <= 1 ||
1.1385 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]); }
1.1386 +
1.1387 + // Assumes the result is not empty.
1.1388 + static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1.1389 + _RopeRep* __right)
1.1390 + {
1.1391 + _RopeRep* __result = _S_concat_rep(__left, __right);
1.1392 + if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1.1393 + return __result;
1.1394 + }
1.1395 +
1.1396 + // The basic rebalancing operation. Logically copies the
1.1397 + // rope. The result has refcount of 1. The client will
1.1398 + // usually decrement the reference count of __r.
1.1399 + // The result is within height 2 of balanced by the above
1.1400 + // definition.
1.1401 + static _RopeRep* _S_balance(_RopeRep* __r);
1.1402 +
1.1403 +  // Add all unbalanced subtrees to the forest of balanced trees.
1.1404 + // Used only by balance.
1.1405 + static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1.1406 +
1.1407 + // Add __r to forest, assuming __r is already balanced.
1.1408 + static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1.1409 +
1.1410 + // Print to stdout, exposing structure
1.1411 + static void _S_dump(_RopeRep* __r, int __indent = 0);
1.1412 +
1.1413 + // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1.1414 + static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1.1415 +
1.1416 +public:
1.1417 + bool empty() const { return 0 == _M_tree_ptr._M_data; }
1.1418 +
1.1419 + // Comparison member function. This is public only for those
1.1420 + // clients that need a ternary comparison. Others
1.1421 + // should use the comparison operators below.
1.1422 + int compare(const _Self& __y) const {
1.1423 + return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1.1424 + }
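+  // For instance (illustrative):
+  //   rope<char> x("abc"), y("abd");
+  //   int c = x.compare(y);  // c < 0 here; 0 if equal, > 0 if x sorts after y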
1.1425 +
1.1426 + rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1.1427 + : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),__a))
1.1428 + { }
1.1429 +
1.1430 + rope(const _CharT* __s, size_t __len,
1.1431 + const allocator_type& __a = allocator_type())
1.1432 + : _M_tree_ptr(__a, (_STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a)))
1.1433 + { }
1.1434 +
1.1435 + // Should perhaps be templatized with respect to the iterator type
1.1436 + // and use Sequence_buffer. (It should perhaps use sequence_buffer
1.1437 + // even now.)
1.1438 + rope(const _CharT *__s, const _CharT *__e,
1.1439 + const allocator_type& __a = allocator_type())
1.1440 + : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a))
1.1441 + { }
1.1442 +
1.1443 + rope(const const_iterator& __s, const const_iterator& __e,
1.1444 + const allocator_type& __a = allocator_type())
1.1445 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1446 + __e._M_current_pos))
1.1447 + { }
1.1448 +
1.1449 + rope(const iterator& __s, const iterator& __e,
1.1450 + const allocator_type& __a = allocator_type())
1.1451 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1452 + __e._M_current_pos))
1.1453 + { }
1.1454 +
1.1455 + rope(_CharT __c, const allocator_type& __a = allocator_type())
1.1456 + : _M_tree_ptr(__a, (_RopeRep*)0)
1.1457 + {
1.1458 + _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1.1459 +
1.1460 + _Construct(__buf, __c);
1.1461 + _STLP_TRY {
1.1462 + _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1.1463 + }
1.1464 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
1.1465 + }
1.1466 +
1.1467 + rope(size_t __n, _CharT __c,
1.1468 + const allocator_type& __a = allocator_type()):
1.1469 + _M_tree_ptr(__a, (_RopeRep*)0) {
1.1470 + rope<_CharT,_Alloc> __result;
1.1471 +# define __exponentiate_threshold size_t(32)
1.1472 + _RopeRep* __remainder;
1.1473 + rope<_CharT,_Alloc> __remainder_rope;
1.1474 +
1.1475 +        // Local typedef works around gcc-2.7.2 bugs.
1.1476 + typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1477 +
1.1478 + if (0 == __n)
1.1479 + return;
1.1480 +
1.1481 + size_t __exponent = __n / __exponentiate_threshold;
1.1482 + size_t __rest = __n % __exponentiate_threshold;
1.1483 + if (0 == __rest) {
1.1484 + __remainder = 0;
1.1485 + } else {
1.1486 + _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1.1487 + uninitialized_fill_n(__rest_buffer, __rest, __c);
1.1488 + _S_cond_store_eos(__rest_buffer[__rest]);
1.1489 + _STLP_TRY {
1.1490 + __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1.1491 + }
1.1492 + _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1.1493 + }
1.1494 + __remainder_rope._M_tree_ptr._M_data = __remainder;
1.1495 + if (__exponent != 0) {
1.1496 + _CharT* __base_buffer =
1.1497 + _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1.1498 + _RopeLeaf* __base_leaf;
1.1499 + rope<_CharT,_Alloc> __base_rope;
1.1500 + uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1.1501 + _S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
1.1502 + _STLP_TRY {
1.1503 + __base_leaf = _S_new_RopeLeaf(__base_buffer,
1.1504 + __exponentiate_threshold, __a);
1.1505 + }
1.1506 + _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1.1507 + __exponentiate_threshold, __a))
1.1508 + __base_rope._M_tree_ptr._M_data = __base_leaf;
1.1509 + if (1 == __exponent) {
1.1510 + __result = __base_rope;
1.1511 +# ifndef __GC
1.1512 + _STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1.1513 +            // One each for __base_rope and __result
1.1514 +# endif
1.1515 + } else {
1.1516 + __result = power(__base_rope, __exponent, _Concat_fn());
1.1517 + }
1.1518 + if (0 != __remainder) {
1.1519 + __result += __remainder_rope;
1.1520 + }
1.1521 + } else {
1.1522 + __result = __remainder_rope;
1.1523 + }
1.1524 + _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1.1525 + _M_tree_ptr._M_data->_M_ref_nonnil();
1.1526 +# undef __exponentiate_threshold
1.1527 + }
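+    // The fill constructor above builds a single __exponentiate_threshold-
+    // character leaf and combines __exponent copies of it with power()
+    // (repeated squaring over rope concatenation), then appends a leaf for
+    // the remaining __n % __exponentiate_threshold characters.
+    // Illustrative sketch:
+    //   rope<char> __r(100, 'x');   // 3 full 32-character leaves + a 4-character remainder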
1.1528 +
1.1529 + rope(const allocator_type& __a = allocator_type())
1.1530 + : _M_tree_ptr(__a, (_RopeRep*)0) {}
1.1531 +
1.1532 + // Construct a rope from a function that can compute its members
1.1533 + rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1.1534 + const allocator_type& __a = allocator_type())
1.1535 + : _M_tree_ptr(__a, (_RopeRep*)0)
1.1536 + {
1.1537 + _M_tree_ptr._M_data = (0 == __len) ?
1.1538 + 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1.1539 + }
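+    // Illustrative sketch (not part of this header), assuming the
+    // char_producer interface declared earlier in this file, i.e.
+    // operator()(size_t __start, size_t __len, _CharT* __buf):
+    //   struct __all_x : public char_producer<char> {
+    //     virtual void operator()(size_t, size_t __len, char* __buf)
+    //       { for (size_t __i = 0; __i < __len; ++__i) __buf[__i] = 'x'; }
+    //   };
+    //   crope __lazy(new __all_x, 10, true);   // 10 'x's, computed on demand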
1.1540 +
1.1541 + rope(const _Self& __x)
1.1542 + : _M_tree_ptr(__x.get_allocator(), __x._M_tree_ptr._M_data)
1.1543 + {
1.1544 + _S_ref(_M_tree_ptr._M_data);
1.1545 + }
1.1546 +
1.1547 + ~rope()
1.1548 + {
1.1549 + _S_unref(_M_tree_ptr._M_data);
1.1550 + }
1.1551 +
1.1552 + _Self& operator=(const _Self& __x)
1.1553 + {
1.1554 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1555 + _STLP_ASSERT(get_allocator() == __x.get_allocator())
1.1556 + _M_tree_ptr._M_data = __x._M_tree_ptr._M_data;
1.1557 + _S_ref(_M_tree_ptr._M_data);
1.1558 + _S_unref(__old);
1.1559 + return(*this);
1.1560 + }
1.1561 + void clear()
1.1562 + {
1.1563 + _S_unref(_M_tree_ptr._M_data);
1.1564 + _M_tree_ptr._M_data = 0;
1.1565 + }
1.1566 + void push_back(_CharT __x)
1.1567 + {
1.1568 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1569 + _M_tree_ptr._M_data = _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1);
1.1570 + _S_unref(__old);
1.1571 + }
1.1572 +
1.1573 + void pop_back()
1.1574 + {
1.1575 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1576 + _M_tree_ptr._M_data =
1.1577 + _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1578 + _S_unref(__old);
1.1579 + }
1.1580 +
1.1581 + _CharT back() const
1.1582 + {
1.1583 + return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1584 + }
1.1585 +
1.1586 + void push_front(_CharT __x)
1.1587 + {
1.1588 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1589 + _RopeRep* __left =
1.1590 + _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1.1591 + _STLP_TRY {
1.1592 + _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1.1593 + _S_unref(__old);
1.1594 + _S_unref(__left);
1.1595 + }
1.1596 + _STLP_UNWIND(_S_unref(__left))
1.1597 + }
1.1598 +
1.1599 + void pop_front()
1.1600 + {
1.1601 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1602 + _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1.1603 + _S_unref(__old);
1.1604 + }
1.1605 +
1.1606 + _CharT front() const
1.1607 + {
1.1608 + return _S_fetch(_M_tree_ptr._M_data, 0);
1.1609 + }
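+    // Illustrative sketch: edits at either end are cheap, because each
+    // operation shares most of the previous tree.
+    //   crope __r("bc");
+    //   __r.push_front('a');   // "abc"
+    //   __r.push_back('d');    // "abcd"
+    //   __r.pop_front();       // "bcd"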
1.1610 +
1.1611 + void balance()
1.1612 + {
1.1613 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1614 + _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
1.1615 + _S_unref(__old);
1.1616 + }
1.1617 +
1.1618 + void copy(_CharT* __buffer) const {
1.1619 + _STLP_STD::_Destroy(__buffer, __buffer + size());
1.1620 + _S_flatten(_M_tree_ptr._M_data, __buffer);
1.1621 + }
1.1622 +
1.1623 + // This is the copy function from the standard, but
1.1624 + // with the arguments reordered to make it consistent with the
1.1625 + // rest of the interface.
1.1626 +    // Note that this is guaranteed not to compile if the draft standard
1.1627 + // order is assumed.
1.1628 + size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1.1629 + {
1.1630 + size_t _p_size = size();
1.1631 + size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1.1632 +
1.1633 + _STLP_STD::_Destroy(__buffer, __buffer + __len);
1.1634 + _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1.1635 + return __len;
1.1636 + }
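+    // Illustrative sketch (hypothetical buffer): flatten a slice into a
+    // caller-supplied array; the return value is the number of characters
+    // actually copied.
+    //   crope __r("abcdef");
+    //   char __buf[4];
+    //   size_t __got = __r.copy(2, 4, __buf);   // __buf holds 'c','d','e','f'; __got == 4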
1.1637 +
1.1638 + // Print to stdout, exposing structure. May be useful for
1.1639 + // performance debugging.
1.1640 + void dump() {
1.1641 + _S_dump(_M_tree_ptr._M_data);
1.1642 + }
1.1643 +
1.1644 +    // Convert to a 0-terminated string in newly allocated memory.
1.1645 + // Embedded 0s in the input do not terminate the copy.
1.1646 + const _CharT* c_str() const;
1.1647 +
1.1648 +    // As above, but also use the flattened representation as
1.1649 +    // the new rope representation.
1.1650 + const _CharT* replace_with_c_str();
1.1651 +
1.1652 +    // Reclaim memory for the flattened string generated by c_str.
1.1653 + // Intentionally undocumented, since it's hard to say when this
1.1654 + // is safe for multiple threads.
1.1655 + void delete_c_str () {
1.1656 + if (0 == _M_tree_ptr._M_data) return;
1.1657 + if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1.1658 + ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1.1659 + _M_tree_ptr._M_data->_M_c_string) {
1.1660 + // Representation shared
1.1661 + return;
1.1662 + }
1.1663 +# ifndef __GC
1.1664 + _M_tree_ptr._M_data->_M_free_c_string();
1.1665 +# endif
1.1666 + _M_tree_ptr._M_data->_M_c_string = 0;
1.1667 + }
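+    // Illustrative sketch of the c_str() family: c_str() materialises a
+    // 0-terminated copy owned by the rope, replace_with_c_str() also adopts
+    // that copy as the rope's representation, and delete_c_str() reclaims it.
+    //   crope __r("abc");
+    //   const char* __p = __r.c_str();   // valid at least until __r is modified or destroyed
+    //   __r.delete_c_str();              // release the flattened copy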
1.1668 +
1.1669 + _CharT operator[] (size_type __pos) const {
1.1670 + return _S_fetch(_M_tree_ptr._M_data, __pos);
1.1671 + }
1.1672 +
1.1673 + _CharT at(size_type __pos) const {
1.1674 + // if (__pos >= size()) throw out_of_range; // XXX
1.1675 + return (*this)[__pos];
1.1676 + }
1.1677 +
1.1678 + const_iterator begin() const {
1.1679 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1680 + }
1.1681 +
1.1682 + // An easy way to get a const iterator from a non-const container.
1.1683 + const_iterator const_begin() const {
1.1684 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1685 + }
1.1686 +
1.1687 + const_iterator end() const {
1.1688 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1689 + }
1.1690 +
1.1691 + const_iterator const_end() const {
1.1692 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1693 + }
1.1694 +
1.1695 + size_type size() const {
1.1696 + return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1.1697 + }
1.1698 +
1.1699 + size_type length() const {
1.1700 + return size();
1.1701 + }
1.1702 +
1.1703 + size_type max_size() const {
1.1704 + return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1.1705 +        // Guarantees that the result can be sufficiently
1.1706 + // balanced. Longer ropes will probably still work,
1.1707 + // but it's harder to make guarantees.
1.1708 + }
1.1709 +
1.1710 + const_reverse_iterator rbegin() const {
1.1711 + return const_reverse_iterator(end());
1.1712 + }
1.1713 +
1.1714 + const_reverse_iterator const_rbegin() const {
1.1715 + return const_reverse_iterator(end());
1.1716 + }
1.1717 +
1.1718 + const_reverse_iterator rend() const {
1.1719 + return const_reverse_iterator(begin());
1.1720 + }
1.1721 +
1.1722 + const_reverse_iterator const_rend() const {
1.1723 + return const_reverse_iterator(begin());
1.1724 + }
1.1725 + // The symmetric cases are intentionally omitted, since they're presumed
1.1726 + // to be less common, and we don't handle them as well.
1.1727 +
1.1728 + // The following should really be templatized.
1.1729 + // The first argument should be an input iterator or
1.1730 + // forward iterator with value_type _CharT.
1.1731 + _Self& append(const _CharT* __iter, size_t __n) {
1.1732 + _RopeRep* __result =
1.1733 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n);
1.1734 + _S_unref(_M_tree_ptr._M_data);
1.1735 + _M_tree_ptr._M_data = __result;
1.1736 + return *this;
1.1737 + }
1.1738 +
1.1739 + _Self& append(const _CharT* __c_string) {
1.1740 + size_t __len = _S_char_ptr_len(__c_string);
1.1741 + append(__c_string, __len);
1.1742 + return(*this);
1.1743 + }
1.1744 +
1.1745 + _Self& append(const _CharT* __s, const _CharT* __e) {
1.1746 + _RopeRep* __result =
1.1747 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s);
1.1748 + _S_unref(_M_tree_ptr._M_data);
1.1749 + _M_tree_ptr._M_data = __result;
1.1750 + return *this;
1.1751 + }
1.1752 +
1.1753 + _Self& append(const_iterator __s, const_iterator __e) {
1.1754 + _STLP_ASSERT(__s._M_root == __e._M_root)
1.1755 + _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1.1756 + _Self_destruct_ptr __appendee(_S_substring(
1.1757 + __s._M_root, __s._M_current_pos, __e._M_current_pos));
1.1758 + _RopeRep* __result =
1.1759 + _S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee);
1.1760 + _S_unref(_M_tree_ptr._M_data);
1.1761 + _M_tree_ptr._M_data = __result;
1.1762 + return *this;
1.1763 + }
1.1764 +
1.1765 + _Self& append(_CharT __c) {
1.1766 + _RopeRep* __result =
1.1767 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1);
1.1768 + _S_unref(_M_tree_ptr._M_data);
1.1769 + _M_tree_ptr._M_data = __result;
1.1770 + return *this;
1.1771 + }
1.1772 +
1.1773 + _Self& append() { return append(_CharT()); } // XXX why?
1.1774 +
1.1775 + _Self& append(const _Self& __y) {
1.1776 + _STLP_ASSERT(__y.get_allocator() == get_allocator())
1.1777 + _RopeRep* __result = _S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1.1778 + _S_unref(_M_tree_ptr._M_data);
1.1779 + _M_tree_ptr._M_data = __result;
1.1780 + return *this;
1.1781 + }
1.1782 +
1.1783 + _Self& append(size_t __n, _CharT __c) {
1.1784 + rope<_CharT,_Alloc> __last(__n, __c);
1.1785 + return append(__last);
1.1786 + }
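+    // Illustrative sketch of the append overloads:
+    //   crope __r("ab");
+    //   __r.append("cd");      // from a 0-terminated string -> "abcd"
+    //   __r.append(2, '!');    // repeated character          -> "abcd!!"
+    //   crope __s("ef");
+    //   __r.append(__s);       // whole-rope append           -> "abcd!!ef"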
1.1787 +
1.1788 + void swap(_Self& __b) {
1.1789 + _STLP_ASSERT(get_allocator() == __b.get_allocator())
1.1790 + _RopeRep* __tmp = _M_tree_ptr._M_data;
1.1791 + _M_tree_ptr._M_data = __b._M_tree_ptr._M_data;
1.1792 + __b._M_tree_ptr._M_data = __tmp;
1.1793 + }
1.1794 +
1.1795 +
1.1796 +protected:
1.1797 + // Result is included in refcount.
1.1798 + static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1.1799 + size_t __pos2, _RopeRep* __r) {
1.1800 + if (0 == __old) { _S_ref(__r); return __r; }
1.1801 + _Self_destruct_ptr __left(
1.1802 + _S_substring(__old, 0, __pos1));
1.1803 + _Self_destruct_ptr __right(
1.1804 + _S_substring(__old, __pos2, __old->_M_size._M_data));
1.1805 + _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1.1806 + _RopeRep* __result;
1.1807 +
1.1808 + if (0 == __r) {
1.1809 + __result = _S_concat_rep(__left, __right);
1.1810 + } else {
1.1811 + _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1.1812 + _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1.1813 + __result = _S_concat_rep(__left_result, __right);
1.1814 + }
1.1815 + return __result;
1.1816 + _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
1.1817 + }
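+    // The helper above rebuilds the rope as
+    //   substring(__old, 0, __pos1) + __r + substring(__old, __pos2, size),
+    // so the public insert() below is a replace over an empty [__p, __p)
+    // range, and erase() is a replace with __r == 0.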
1.1818 +
1.1819 +public:
1.1820 + void insert(size_t __p, const _Self& __r) {
1.1821 + _RopeRep* __result =
1.1822 + replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data);
1.1823 + _STLP_ASSERT(get_allocator() == __r.get_allocator())
1.1824 + _S_unref(_M_tree_ptr._M_data);
1.1825 + _M_tree_ptr._M_data = __result;
1.1826 + }
1.1827 +
1.1828 + void insert(size_t __p, size_t __n, _CharT __c) {
1.1829 + rope<_CharT,_Alloc> __r(__n,__c);
1.1830 + insert(__p, __r);
1.1831 + }
1.1832 +
1.1833 + void insert(size_t __p, const _CharT* __i, size_t __n) {
1.1834 + _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1.1835 + _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1.1836 + _Self_destruct_ptr __left_result(
1.1837 + _S_concat_char_iter(__left, __i, __n));
1.1838 +        // _S_destr_concat_char_iter should be safe here.
1.1839 + // But as it stands it's probably not a win, since __left
1.1840 + // is likely to have additional references.
1.1841 + _RopeRep* __result = _S_concat_rep(__left_result, __right);
1.1842 + _S_unref(_M_tree_ptr._M_data);
1.1843 + _M_tree_ptr._M_data = __result;
1.1844 + }
1.1845 +
1.1846 + void insert(size_t __p, const _CharT* __c_string) {
1.1847 + insert(__p, __c_string, _S_char_ptr_len(__c_string));
1.1848 + }
1.1849 +
1.1850 + void insert(size_t __p, _CharT __c) {
1.1851 + insert(__p, &__c, 1);
1.1852 + }
1.1853 +
1.1854 + void insert(size_t __p) {
1.1855 + _CharT __c = _CharT();
1.1856 + insert(__p, &__c, 1);
1.1857 + }
1.1858 +
1.1859 + void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1860 + _Self __r(__i, __j);
1.1861 + insert(__p, __r);
1.1862 + }
1.1863 +
1.1864 + void insert(size_t __p, const const_iterator& __i,
1.1865 + const const_iterator& __j) {
1.1866 + _Self __r(__i, __j);
1.1867 + insert(__p, __r);
1.1868 + }
1.1869 +
1.1870 + void insert(size_t __p, const iterator& __i,
1.1871 + const iterator& __j) {
1.1872 + _Self __r(__i, __j);
1.1873 + insert(__p, __r);
1.1874 + }
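+    // Illustrative sketch of position-based insertion:
+    //   crope __r("ad");
+    //   __r.insert(1, "bc");    // "abcd"
+    //   __r.insert(4, 2, '!');  // "abcd!!"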
1.1875 +
1.1876 + // (position, length) versions of replace operations:
1.1877 +
1.1878 + void replace(size_t __p, size_t __n, const _Self& __r) {
1.1879 + _RopeRep* __result =
1.1880 + replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data);
1.1881 + _S_unref(_M_tree_ptr._M_data);
1.1882 + _M_tree_ptr._M_data = __result;
1.1883 + }
1.1884 +
1.1885 + void replace(size_t __p, size_t __n,
1.1886 + const _CharT* __i, size_t __i_len) {
1.1887 + _Self __r(__i, __i_len);
1.1888 + replace(__p, __n, __r);
1.1889 + }
1.1890 +
1.1891 + void replace(size_t __p, size_t __n, _CharT __c) {
1.1892 + _Self __r(__c);
1.1893 + replace(__p, __n, __r);
1.1894 + }
1.1895 +
1.1896 + void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1.1897 + _Self __r(__c_string);
1.1898 + replace(__p, __n, __r);
1.1899 + }
1.1900 +
1.1901 + void replace(size_t __p, size_t __n,
1.1902 + const _CharT* __i, const _CharT* __j) {
1.1903 + _Self __r(__i, __j);
1.1904 + replace(__p, __n, __r);
1.1905 + }
1.1906 +
1.1907 + void replace(size_t __p, size_t __n,
1.1908 + const const_iterator& __i, const const_iterator& __j) {
1.1909 + _Self __r(__i, __j);
1.1910 + replace(__p, __n, __r);
1.1911 + }
1.1912 +
1.1913 + void replace(size_t __p, size_t __n,
1.1914 + const iterator& __i, const iterator& __j) {
1.1915 + _Self __r(__i, __j);
1.1916 + replace(__p, __n, __r);
1.1917 + }
1.1918 +
1.1919 + // Single character variants:
1.1920 + void replace(size_t __p, _CharT __c) {
1.1921 + iterator __i(this, __p);
1.1922 + *__i = __c;
1.1923 + }
1.1924 +
1.1925 + void replace(size_t __p, const _Self& __r) {
1.1926 + replace(__p, 1, __r);
1.1927 + }
1.1928 +
1.1929 + void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1.1930 + replace(__p, 1, __i, __i_len);
1.1931 + }
1.1932 +
1.1933 + void replace(size_t __p, const _CharT* __c_string) {
1.1934 + replace(__p, 1, __c_string);
1.1935 + }
1.1936 +
1.1937 + void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1938 + replace(__p, 1, __i, __j);
1.1939 + }
1.1940 +
1.1941 + void replace(size_t __p, const const_iterator& __i,
1.1942 + const const_iterator& __j) {
1.1943 + replace(__p, 1, __i, __j);
1.1944 + }
1.1945 +
1.1946 + void replace(size_t __p, const iterator& __i,
1.1947 + const iterator& __j) {
1.1948 + replace(__p, 1, __i, __j);
1.1949 + }
1.1950 +
1.1951 + // Erase, (position, size) variant.
1.1952 + void erase(size_t __p, size_t __n) {
1.1953 + _RopeRep* __result = replace(_M_tree_ptr._M_data, __p, __p + __n, 0);
1.1954 + _S_unref(_M_tree_ptr._M_data);
1.1955 + _M_tree_ptr._M_data = __result;
1.1956 + }
1.1957 +
1.1958 + // Erase, single character
1.1959 + void erase(size_t __p) {
1.1960 +        erase(__p, 1);  // (position, size) variant above: remove exactly one character
1.1961 + }
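+    // Illustrative sketch:
+    //   crope __r("abcd");
+    //   __r.erase(1, 2);   // remove "bc" -> "ad"
+    //   __r.erase(1);      // remove the character at index 1 -> "a"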
1.1962 +
1.1963 + // Insert, iterator variants.
1.1964 + iterator insert(const iterator& __p, const _Self& __r)
1.1965 + { insert(__p.index(), __r); return __p; }
1.1966 + iterator insert(const iterator& __p, size_t __n, _CharT __c)
1.1967 + { insert(__p.index(), __n, __c); return __p; }
1.1968 + iterator insert(const iterator& __p, _CharT __c)
1.1969 + { insert(__p.index(), __c); return __p; }
1.1970 + iterator insert(const iterator& __p )
1.1971 + { insert(__p.index()); return __p; }
1.1972 + iterator insert(const iterator& __p, const _CharT* c_string)
1.1973 + { insert(__p.index(), c_string); return __p; }
1.1974 + iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1.1975 + { insert(__p.index(), __i, __n); return __p; }
1.1976 + iterator insert(const iterator& __p, const _CharT* __i,
1.1977 + const _CharT* __j)
1.1978 + { insert(__p.index(), __i, __j); return __p; }
1.1979 + iterator insert(const iterator& __p,
1.1980 + const const_iterator& __i, const const_iterator& __j)
1.1981 + { insert(__p.index(), __i, __j); return __p; }
1.1982 + iterator insert(const iterator& __p,
1.1983 + const iterator& __i, const iterator& __j)
1.1984 + { insert(__p.index(), __i, __j); return __p; }
1.1985 +
1.1986 + // Replace, range variants.
1.1987 + void replace(const iterator& __p, const iterator& __q,
1.1988 + const _Self& __r)
1.1989 + { replace(__p.index(), __q.index() - __p.index(), __r); }
1.1990 + void replace(const iterator& __p, const iterator& __q, _CharT __c)
1.1991 + { replace(__p.index(), __q.index() - __p.index(), __c); }
1.1992 + void replace(const iterator& __p, const iterator& __q,
1.1993 + const _CharT* __c_string)
1.1994 + { replace(__p.index(), __q.index() - __p.index(), __c_string); }
1.1995 + void replace(const iterator& __p, const iterator& __q,
1.1996 + const _CharT* __i, size_t __n)
1.1997 + { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
1.1998 + void replace(const iterator& __p, const iterator& __q,
1.1999 + const _CharT* __i, const _CharT* __j)
1.2000 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2001 + void replace(const iterator& __p, const iterator& __q,
1.2002 + const const_iterator& __i, const const_iterator& __j)
1.2003 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2004 + void replace(const iterator& __p, const iterator& __q,
1.2005 + const iterator& __i, const iterator& __j)
1.2006 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2007 +
1.2008 + // Replace, iterator variants.
1.2009 + void replace(const iterator& __p, const _Self& __r)
1.2010 + { replace(__p.index(), __r); }
1.2011 + void replace(const iterator& __p, _CharT __c)
1.2012 + { replace(__p.index(), __c); }
1.2013 + void replace(const iterator& __p, const _CharT* __c_string)
1.2014 + { replace(__p.index(), __c_string); }
1.2015 + void replace(const iterator& __p, const _CharT* __i, size_t __n)
1.2016 + { replace(__p.index(), __i, __n); }
1.2017 + void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
1.2018 + { replace(__p.index(), __i, __j); }
1.2019 + void replace(const iterator& __p, const_iterator __i,
1.2020 + const_iterator __j)
1.2021 + { replace(__p.index(), __i, __j); }
1.2022 + void replace(const iterator& __p, iterator __i, iterator __j)
1.2023 + { replace(__p.index(), __i, __j); }
1.2024 +
1.2025 + // Iterator and range variants of erase
1.2026 + iterator erase(const iterator& __p, const iterator& __q) {
1.2027 + size_t __p_index = __p.index();
1.2028 + erase(__p_index, __q.index() - __p_index);
1.2029 + return iterator(this, __p_index);
1.2030 + }
1.2031 + iterator erase(const iterator& __p) {
1.2032 + size_t __p_index = __p.index();
1.2033 + erase(__p_index, 1);
1.2034 + return iterator(this, __p_index);
1.2035 + }
1.2036 +
1.2037 + _Self substr(size_t __start, size_t __len = 1) const {
1.2038 + return rope<_CharT,_Alloc>(
1.2039 + _S_substring(_M_tree_ptr._M_data, __start, __start + __len));
1.2040 + }
1.2041 +
1.2042 + _Self substr(iterator __start, iterator __end) const {
1.2043 + return rope<_CharT,_Alloc>(
1.2044 + _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.2045 + }
1.2046 +
1.2047 + _Self substr(iterator __start) const {
1.2048 + size_t __pos = __start.index();
1.2049 + return rope<_CharT,_Alloc>(
1.2050 + _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.2051 + }
1.2052 +
1.2053 + _Self substr(const_iterator __start, const_iterator __end) const {
1.2054 + // This might eventually take advantage of the cache in the
1.2055 + // iterator.
1.2056 + return rope<_CharT,_Alloc>(
1.2057 + _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.2058 + }
1.2059 +
1.2060 + rope<_CharT,_Alloc> substr(const_iterator __start) {
1.2061 + size_t __pos = __start.index();
1.2062 + return rope<_CharT,_Alloc>(
1.2063 + _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.2064 + }
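+    // Illustrative sketch: substrings share structure with the source rope
+    // rather than copying characters.
+    //   crope __r("abcdef");
+    //   crope __s = __r.substr(2, 3);   // "cde"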
1.2065 +
1.2066 + enum { npos = -1 };
1.2067 +
1.2068 + // static const size_type npos;
1.2069 +
1.2070 + size_type find(_CharT __c, size_type __pos = 0) const;
1.2071 + size_type find(const _CharT* __s, size_type __pos = 0) const {
1.2072 + size_type __result_pos;
1.2073 + const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
1.2074 + __s, __s + _S_char_ptr_len(__s));
1.2075 + __result_pos = __result.index();
1.2076 +# ifndef _STLP_OLD_ROPE_SEMANTICS
1.2077 + if (__result_pos == size()) __result_pos = npos;
1.2078 +# endif
1.2079 + return __result_pos;
1.2080 + }
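+    // Illustrative sketch:
+    //   crope __r("abcabc");
+    //   size_type __i = __r.find('b');       // 1
+    //   size_type __j = __r.find("ca", 1);   // 2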
1.2081 +
1.2082 + iterator mutable_begin() {
1.2083 + return(iterator(this, 0));
1.2084 + }
1.2085 +
1.2086 + iterator mutable_end() {
1.2087 + return(iterator(this, size()));
1.2088 + }
1.2089 +
1.2090 + reverse_iterator mutable_rbegin() {
1.2091 + return reverse_iterator(mutable_end());
1.2092 + }
1.2093 +
1.2094 + reverse_iterator mutable_rend() {
1.2095 + return reverse_iterator(mutable_begin());
1.2096 + }
1.2097 +
1.2098 + reference mutable_reference_at(size_type __pos) {
1.2099 + return reference(this, __pos);
1.2100 + }
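+    // Illustrative sketch: mutable element access goes through a proxy
+    // reference, so assignment copies only the path that must change.
+    //   crope __r("abc");
+    //   __r.mutable_reference_at(1) = 'x';   // "axc"; other ropes sharing the old tree are unaffected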
1.2101 +
1.2102 +# ifdef __STD_STUFF
1.2103 + reference operator[] (size_type __pos) {
1.2104 + return reference(this, __pos);
1.2105 + }
1.2106 +
1.2107 + reference at(size_type __pos) {
1.2108 + // if (__pos >= size()) throw out_of_range; // XXX
1.2109 + return (*this)[__pos];
1.2110 + }
1.2111 +
1.2112 + void resize(size_type, _CharT) {}
1.2113 + void resize(size_type) {}
1.2114 + void reserve(size_type = 0) {}
1.2115 + size_type capacity() const {
1.2116 + return max_size();
1.2117 + }
1.2118 +
1.2119 +    // Stuff below this line is dangerous because it's error-prone.
1.2120 + // I would really like to get rid of it.
1.2121 + // copy function with funny arg ordering.
1.2122 + size_type copy(_CharT* __buffer, size_type __n,
1.2123 + size_type __pos = 0) const {
1.2124 + return copy(__pos, __n, __buffer);
1.2125 + }
1.2126 +
1.2127 + iterator end() { return mutable_end(); }
1.2128 +
1.2129 + iterator begin() { return mutable_begin(); }
1.2130 +
1.2131 + reverse_iterator rend() { return mutable_rend(); }
1.2132 +
1.2133 + reverse_iterator rbegin() { return mutable_rbegin(); }
1.2134 +
1.2135 +# else
1.2136 +
1.2137 + const_iterator end() { return const_end(); }
1.2138 +
1.2139 + const_iterator begin() { return const_begin(); }
1.2140 +
1.2141 + const_reverse_iterator rend() { return const_rend(); }
1.2142 +
1.2143 + const_reverse_iterator rbegin() { return const_rbegin(); }
1.2144 +
1.2145 +# endif
1.2146 +
1.2147 + __ROPE_DEFINE_ALLOCS(_Alloc, _M_tree_ptr)
1.2148 + };
1.2149 +
1.2150 +# undef __ROPE_DEFINE_ALLOC
1.2151 +# undef __ROPE_DEFINE_ALLOCS
1.2152 +
1.2153 +template <class _CharT, class _Alloc>
1.2154 +inline _CharT
1.2155 +_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
1.2156 +{
1.2157 + return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n);
1.2158 +}
1.2159 +
1.2160 +template <class _CharT, class _Alloc>
1.2161 +inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2162 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2163 + return (__x._M_current_pos == __y._M_current_pos &&
1.2164 + __x._M_root == __y._M_root);
1.2165 +}
1.2166 +
1.2167 +template <class _CharT, class _Alloc>
1.2168 +inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2169 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2170 + return (__x._M_current_pos < __y._M_current_pos);
1.2171 +}
1.2172 +
1.2173 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2174 +
1.2175 +template <class _CharT, class _Alloc>
1.2176 +inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2177 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2178 + return !(__x == __y);
1.2179 +}
1.2180 +
1.2181 +template <class _CharT, class _Alloc>
1.2182 +inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2183 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2184 + return __y < __x;
1.2185 +}
1.2186 +
1.2187 +template <class _CharT, class _Alloc>
1.2188 +inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2189 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2190 + return !(__y < __x);
1.2191 +}
1.2192 +
1.2193 +template <class _CharT, class _Alloc>
1.2194 +inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2195 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2196 + return !(__x < __y);
1.2197 +}
1.2198 +
1.2199 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2200 +
1.2201 +template <class _CharT, class _Alloc>
1.2202 +inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2203 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2204 + return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
1.2205 +}
1.2206 +
1.2207 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2208 +template <class _CharT, class _Alloc>
1.2209 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2210 +operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
1.2211 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2212 + __x._M_root, __x._M_current_pos - __n);
1.2213 +}
1.2214 +# endif
1.2215 +
1.2216 +template <class _CharT, class _Alloc>
1.2217 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2218 +operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
1.2219 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2220 + __x._M_root, __x._M_current_pos + __n);
1.2221 +}
1.2222 +
1.2223 +template <class _CharT, class _Alloc>
1.2224 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2225 +operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
1.2226 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2227 + __x._M_root, __x._M_current_pos + __n);
1.2228 +}
1.2229 +
1.2230 +template <class _CharT, class _Alloc>
1.2231 +inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2232 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2233 + return (__x._M_current_pos == __y._M_current_pos &&
1.2234 + __x._M_root_rope == __y._M_root_rope);
1.2235 +}
1.2236 +
1.2237 +template <class _CharT, class _Alloc>
1.2238 +inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2239 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2240 + return (__x._M_current_pos < __y._M_current_pos);
1.2241 +}
1.2242 +
1.2243 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2244 +
1.2245 +template <class _CharT, class _Alloc>
1.2246 +inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2247 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2248 + return !(__x == __y);
1.2249 +}
1.2250 +
1.2251 +template <class _CharT, class _Alloc>
1.2252 +inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2253 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2254 + return __y < __x;
1.2255 +}
1.2256 +
1.2257 +template <class _CharT, class _Alloc>
1.2258 +inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2259 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2260 + return !(__y < __x);
1.2261 +}
1.2262 +
1.2263 +template <class _CharT, class _Alloc>
1.2264 +inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2265 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2266 + return !(__x < __y);
1.2267 +}
1.2268 +
1.2269 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2270 +
1.2271 +template <class _CharT, class _Alloc>
1.2272 +inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2273 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2274 + return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
1.2275 +}
1.2276 +
1.2277 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2278 +template <class _CharT, class _Alloc>
1.2279 +inline _Rope_iterator<_CharT,_Alloc>
1.2280 +operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2281 + ptrdiff_t __n) {
1.2282 + return _Rope_iterator<_CharT,_Alloc>(
1.2283 + __x._M_root_rope, __x._M_current_pos - __n);
1.2284 +}
1.2285 +# endif
1.2286 +
1.2287 +template <class _CharT, class _Alloc>
1.2288 +inline _Rope_iterator<_CharT,_Alloc>
1.2289 +operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2290 + ptrdiff_t __n) {
1.2291 + return _Rope_iterator<_CharT,_Alloc>(
1.2292 + __x._M_root_rope, __x._M_current_pos + __n);
1.2293 +}
1.2294 +
1.2295 +template <class _CharT, class _Alloc>
1.2296 +inline _Rope_iterator<_CharT,_Alloc>
1.2297 +operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
1.2298 + return _Rope_iterator<_CharT,_Alloc>(
1.2299 + __x._M_root_rope, __x._M_current_pos + __n);
1.2300 +}
1.2301 +
1.2302 +template <class _CharT, class _Alloc>
1.2303 +inline
1.2304 +rope<_CharT,_Alloc>
1.2305 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2306 + const rope<_CharT,_Alloc>& __right)
1.2307 +{
1.2308 + _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
1.2309 + return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
1.2310 + // Inlining this should make it possible to keep __left and
1.2311 + // __right in registers.
1.2312 +}
1.2313 +
1.2314 +template <class _CharT, class _Alloc>
1.2315 +inline
1.2316 +rope<_CharT,_Alloc>&
1.2317 +operator+= (rope<_CharT,_Alloc>& __left,
1.2318 + const rope<_CharT,_Alloc>& __right)
1.2319 +{
1.2320 + __left.append(__right);
1.2321 + return __left;
1.2322 +}
1.2323 +
1.2324 +template <class _CharT, class _Alloc>
1.2325 +inline
1.2326 +rope<_CharT,_Alloc>
1.2327 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2328 + const _CharT* __right) {
1.2329 + size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
1.2330 + return rope<_CharT,_Alloc>(
1.2331 + rope<_CharT,_Alloc>::_S_concat_char_iter(
1.2332 + __left._M_tree_ptr._M_data, __right, __rlen));
1.2333 +}
1.2334 +
1.2335 +template <class _CharT, class _Alloc>
1.2336 +inline
1.2337 +rope<_CharT,_Alloc>&
1.2338 +operator+= (rope<_CharT,_Alloc>& __left,
1.2339 + const _CharT* __right) {
1.2340 + __left.append(__right);
1.2341 + return __left;
1.2342 +}
1.2343 +
1.2344 +template <class _CharT, class _Alloc>
1.2345 +inline
1.2346 +rope<_CharT,_Alloc>
1.2347 +operator+ (const rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
1.2348 + return rope<_CharT,_Alloc>(
1.2349 + rope<_CharT,_Alloc>::_S_concat_char_iter(
1.2350 + __left._M_tree_ptr._M_data, &__right, 1));
1.2351 +}
1.2352 +
1.2353 +template <class _CharT, class _Alloc>
1.2354 +inline
1.2355 +rope<_CharT,_Alloc>&
1.2356 +operator+= (rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
1.2357 + __left.append(__right);
1.2358 + return __left;
1.2359 +}
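+// Illustrative sketch of the concatenation operators above:
+//   crope __a("ab"), __b("cd");
+//   crope __c = __a + __b + "ef" + '!';   // "abcdef!"
+//   __a += '!';                           // "ab!"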
1.2360 +
1.2361 +template <class _CharT, class _Alloc>
1.2362 +inline bool
1.2363 +operator< (const rope<_CharT,_Alloc>& __left,
1.2364 + const rope<_CharT,_Alloc>& __right) {
1.2365 + return __left.compare(__right) < 0;
1.2366 +}
1.2367 +
1.2368 +template <class _CharT, class _Alloc>
1.2369 +inline bool
1.2370 +operator== (const rope<_CharT,_Alloc>& __left,
1.2371 + const rope<_CharT,_Alloc>& __right) {
1.2372 + return __left.compare(__right) == 0;
1.2373 +}
1.2374 +
1.2375 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2376 +
1.2377 +template <class _CharT, class _Alloc>
1.2378 +inline bool
1.2379 +operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2380 + return !(__x == __y);
1.2381 +}
1.2382 +
1.2383 +template <class _CharT, class _Alloc>
1.2384 +inline bool
1.2385 +operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2386 + return __y < __x;
1.2387 +}
1.2388 +
1.2389 +template <class _CharT, class _Alloc>
1.2390 +inline bool
1.2391 +operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2392 + return !(__y < __x);
1.2393 +}
1.2394 +
1.2395 +template <class _CharT, class _Alloc>
1.2396 +inline bool
1.2397 +operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2398 + return !(__x < __y);
1.2399 +}
1.2400 +
1.2401 +template <class _CharT, class _Alloc>
1.2402 +inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2403 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2404 + return !(__x == __y);
1.2405 +}
1.2406 +
1.2407 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2408 +
1.2409 +template <class _CharT, class _Alloc>
1.2410 +inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2411 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2412 + return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
1.2413 +}
1.2414 +
1.2415 +#ifdef _STLP_USE_NEW_IOSTREAMS
1.2416 +template<class _CharT, class _Traits, class _Alloc>
1.2417 +basic_ostream<_CharT, _Traits>& operator<< (
1.2418 + basic_ostream<_CharT, _Traits>& __o,
1.2419 + const rope<_CharT, _Alloc>& __r);
1.2420 +#elif ! defined (_STLP_USE_NO_IOSTREAMS)
1.2421 +template<class _CharT, class _Alloc>
1.2422 +ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
1.2423 +#endif
1.2424 +
1.2425 +typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
1.2426 +# ifdef _STLP_HAS_WCHAR_T
1.2427 +typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
1.2428 +# endif
1.2429 +
1.2430 +inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
1.2431 +{
1.2432 + return __c.mutable_reference_at(__i);
1.2433 +}
1.2434 +
1.2435 +# ifdef _STLP_HAS_WCHAR_T
1.2436 +inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
1.2437 +{
1.2438 + return __c.mutable_reference_at(__i);
1.2439 +}
1.2440 +# endif
1.2441 +
1.2442 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
1.2443 +
1.2444 +template <class _CharT, class _Alloc>
1.2445 +inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
1.2446 + __x.swap(__y);
1.2447 +}
1.2448 +#else
1.2449 +
1.2450 +inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
1.2451 +# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
1.2452 +inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
1.2453 +# endif
1.2454 +
1.2455 +#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
1.2456 +
1.2457 +
1.2458 +// Hash functions should probably be revisited later:
1.2459 +_STLP_TEMPLATE_NULL struct hash<crope>
1.2460 +{
1.2461 + size_t operator()(const crope& __str) const
1.2462 + {
1.2463 + size_t _p_size = __str.size();
1.2464 +
1.2465 + if (0 == _p_size) return 0;
1.2466 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2467 + }
1.2468 +};
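+// Illustrative note: the specialisation above (and the wrope one below, when
+// wchar_t is available) lets ropes serve as keys in STLport's hashed
+// containers, e.g. hash_map<crope, int>.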
1.2469 +
1.2470 +# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
1.2471 +_STLP_TEMPLATE_NULL struct hash<wrope>
1.2472 +{
1.2473 + size_t operator()(const wrope& __str) const
1.2474 + {
1.2475 + size_t _p_size = __str.size();
1.2476 +
1.2477 + if (0 == _p_size) return 0;
1.2478 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2479 + }
1.2480 +};
1.2481 +#endif
1.2482 +
1.2483 +#ifndef _STLP_MSVC
1.2484 +// I couldn't get this to work with VC++
1.2485 +template<class _CharT,class _Alloc>
1.2486 +void
1.2487 +_Rope_rotate(_Rope_iterator<_CharT,_Alloc> __first,
1.2488 + _Rope_iterator<_CharT,_Alloc> __middle,
1.2489 + _Rope_iterator<_CharT,_Alloc> __last);
1.2490 +
1.2491 +#if !defined(__GNUC__)
1.2492 +// Appears to confuse g++
1.2493 +inline void rotate(_Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __first,
1.2494 + _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __middle,
1.2495 + _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __last) {
1.2496 + _Rope_rotate(__first, __middle, __last);
1.2497 +}
1.2498 +#endif
1.2499 +
1.2500 +#endif
1.2501 +
1.2502 +template <class _CharT, class _Alloc>
1.2503 +inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const
1.2504 +{
1.2505 + if (_M_current_valid) {
1.2506 + return _M_current;
1.2507 + } else {
1.2508 + return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
1.2509 + }
1.2510 +}
1.2511 +_STLP_END_NAMESPACE
1.2512 +
1.2513 +# if !defined (_STLP_LINK_TIME_INSTANTIATION)
1.2514 +# include <stl/_rope.c>
1.2515 +# endif
1.2516 +
1.2517 +# endif /* _STLP_INTERNAL_ROPE_H */
1.2518 +
1.2519 +// Local Variables:
1.2520 +// mode:C++
1.2521 +// End: