1.1 --- a/epoc32/include/stdapis/stlport/stl/_rope.h Tue Nov 24 13:55:44 2009 +0000
1.2 +++ b/epoc32/include/stdapis/stlport/stl/_rope.h Tue Mar 16 16:12:26 2010 +0000
1.3 @@ -1,1 +1,2518 @@
1.4 -_rope.h
1.5 +/*
1.6 + * © Portions copyright (c) 2006-2007 Nokia Corporation. All rights reserved.
1.7 + * Copyright (c) 1996,1997
1.8 + * Silicon Graphics Computer Systems, Inc.
1.9 + *
1.10 + * Copyright (c) 1997
1.11 + * Moscow Center for SPARC Technology
1.12 + *
1.13 + * Copyright (c) 1999
1.14 + * Boris Fomitchev
1.15 + *
1.16 + * This material is provided "as is", with absolutely no warranty expressed
1.17 + * or implied. Any use is at your own risk.
1.18 + *
1.19 + * Permission to use or copy this software for any purpose is hereby granted
1.20 + * without fee, provided the above notices are retained on all copies.
1.21 + * Permission to modify the code and to distribute modified code is granted,
1.22 + * provided the above notices are retained, and a notice that the code was
1.23 + * modified is included with the above copyright notice.
1.24 + *
1.25 + */
1.26 +
1.27 +/* NOTE: This is an internal header file, included by other STL headers.
1.28 + * You should not attempt to use it directly.
1.29 + */
1.30 +
1.31 +// rope<_CharT,_Alloc> is a sequence of _CharT.
1.32 +// Ropes appear to be mutable, but update operations
1.33 +// really copy enough of the data structure to leave the original
1.34 +// valid. Thus ropes can be logically copied by just copying
1.35 +// a pointer value.
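// A minimal usage sketch of the behaviour described above (assuming a
// conforming STLport build where <rope> provides the crope typedef for
// rope<char>):
//
//   crope a("hello");
//   crope b = a;                    // O(1): only a pointer is copied
//   b.push_back('!');               // builds new nodes, sharing the old ones
//   // a still compares equal to "hello"; b compares equal to "hello!"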
1.36 +
1.37 +#ifndef _STLP_INTERNAL_ROPE_H
1.38 +# define _STLP_INTERNAL_ROPE_H
1.39 +
1.40 +# ifndef _STLP_INTERNAL_ALGOBASE_H
1.41 +# include <stl/_algobase.h>
1.42 +# endif
1.43 +
1.44 +# ifndef _STLP_IOSFWD
1.45 +# include <iosfwd>
1.46 +# endif
1.47 +
1.48 +# ifndef _STLP_INTERNAL_ALLOC_H
1.49 +# include <stl/_alloc.h>
1.50 +# endif
1.51 +
1.52 +# ifndef _STLP_INTERNAL_ITERATOR_H
1.53 +# include <stl/_iterator.h>
1.54 +# endif
1.55 +
1.56 +# ifndef _STLP_INTERNAL_ALGO_H
1.57 +# include <stl/_algo.h>
1.58 +# endif
1.59 +
1.60 +# ifndef _STLP_INTERNAL_FUNCTION_H
1.61 +# include <stl/_function.h>
1.62 +# endif
1.63 +
1.64 +# ifndef _STLP_INTERNAL_NUMERIC_H
1.65 +# include <stl/_numeric.h>
1.66 +# endif
1.67 +
1.68 +# ifndef _STLP_INTERNAL_HASH_FUN_H
1.69 +# include <stl/_hash_fun.h>
1.70 +# endif
1.71 +
1.72 +# ifdef __GC
1.73 +# define __GC_CONST const
1.74 +# else
1.75 +# include <stl/_threads.h>
1.76 +# define __GC_CONST // constant except for deallocation
1.77 +# endif
1.78 +# ifdef _STLP_SGI_THREADS
1.79 +# include <mutex.h>
1.80 +# endif
1.81 +
1.82 +#ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
1.83 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
1.84 +#elif defined(__MRC__)||defined(__SC__)
1.85 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
1.86 +#else
1.87 +# define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
1.88 +#endif
1.89 +
1.90 +_STLP_BEGIN_NAMESPACE
1.91 +
1.92 +// First a lot of forward declarations. The standard seems to require
1.93 +// much stricter "declaration before use" than many of the implementations
1.94 +// that preceded it.
1.95 +template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
1.96 +template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
1.97 +template<class _CharT, class _Alloc> struct _Rope_RopeRep;
1.98 +template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
1.99 +template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
1.100 +template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
1.101 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.102 +template<class _CharT, class _Alloc> class _Rope_const_iterator;
1.103 +template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
1.104 +template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
1.105 +
1.106 +// Some helpers, so we can use power on ropes.
1.107 +// See below for why this isn't local to the implementation.
1.108 +
1.109 +// This uses a nonstandard refcount convention.
1.110 +// The result has refcount 0.
1.111 +template<class _CharT, class _Alloc>
1.112 +struct _Rope_Concat_fn
1.113 + : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
1.114 + rope<_CharT,_Alloc> > {
1.115 + rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
1.116 + const rope<_CharT,_Alloc>& __y) {
1.117 + return __x + __y;
1.118 + }
1.119 +};
1.120 +
1.121 +template <class _CharT, class _Alloc>
1.122 +inline
1.123 +rope<_CharT,_Alloc>
1.124 +__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
1.125 +{
1.126 + return rope<_CharT,_Alloc>();
1.127 +}
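// These helpers let the SGI power() extension (declared in <stl/_numeric.h>,
// included above) concatenate a rope with itself by repeated squaring, i.e.
// O(log n) concatenations. A hedged sketch:
//
//   rope<char> ab("ab");
//   rope<char> r = power(ab, 8, _Rope_Concat_fn<char, allocator<char> >());
//   // r represents "abababababababab" without copying the characters 8 times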
1.128 +
1.129 +// The _S_eos function is used for those functions that
1.130 +// convert to/from C-like strings to detect the end of the string.
1.131 +
1.132 +// The end-of-C-string character.
1.133 +// This is what the draft standard says it should be.
1.134 +template <class _CharT>
1.135 +inline _CharT _S_eos(_CharT*) { return _CharT(); }
1.136 +
1.137 +// fbp : some compilers fail to zero-initialize builtins ;(
1.138 +inline const char _S_eos(const char*) { return 0; }
1.139 +# ifdef _STLP_HAS_WCHAR_T
1.140 +inline const wchar_t _S_eos(const wchar_t*) { return 0; }
1.141 +# endif
1.142 +
1.143 +// Test for basic character types.
1.144 +// For basic character types, leaves store a trailing eos character.
1.145 +template <class _CharT>
1.146 +inline bool _S_is_basic_char_type(_CharT*) { return false; }
1.147 +template <class _CharT>
1.148 +inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
1.149 +
1.150 +inline bool _S_is_basic_char_type(char*) { return true; }
1.151 +inline bool _S_is_one_byte_char_type(char*) { return true; }
1.152 +# ifdef _STLP_HAS_WCHAR_T
1.153 +inline bool _S_is_basic_char_type(wchar_t*) { return true; }
1.154 +# endif
1.155 +
1.156 +// Store an eos iff _CharT is a basic character type.
1.157 +// Do not reference _S_eos if it isn't.
1.158 +template <class _CharT>
1.159 +inline void _S_cond_store_eos(_CharT&) {}
1.160 +
1.161 +inline void _S_cond_store_eos(char& __c) { __c = 0; }
1.162 +# ifdef _STLP_HAS_WCHAR_T
1.163 +inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
1.164 +# endif
1.165 +
1.166 +// char_producers are logically functions that generate a section of
1.167 +// a string. These can be converted to ropes. The resulting rope
1.168 +// invokes the char_producer on demand. This allows, for example,
1.169 +// files to be viewed as ropes without reading the entire file.
1.170 +template <class _CharT>
1.171 +class char_producer {
1.172 +public:
1.173 + virtual ~char_producer() {};
1.174 + virtual void operator()(size_t __start_pos, size_t __len,
1.175 + _CharT* __buffer) = 0;
1.176 + // Buffer should really be an arbitrary output iterator.
1.177 + // That way we could flatten directly into an ostream, etc.
1.178 + // This is thoroughly impossible, since iterator types don't
1.179 + // have runtime descriptions.
1.180 +};
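// An illustrative (hypothetical) char_producer: a rope built on top of it
// only invokes operator() for the pieces that are actually examined, which
// is the "file viewed as a rope" idea above.
//
//   struct file_producer : public char_producer<char> {
//     FILE* _M_file;
//     file_producer(FILE* __f) : _M_file(__f) {}
//     virtual void operator()(size_t __start, size_t __len, char* __buf) {
//       fseek(_M_file, (long)__start, SEEK_SET);
//       fread(__buf, 1, __len, _M_file);
//     }
//   };
//   // rope<char> lazy(new file_producer(__fp), __file_size, true /*delete*/);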
1.181 +
1.182 +// Sequence buffers:
1.183 +//
1.184 +// Sequence must provide an append operation that appends an
1.185 +// array to the sequence. Sequence buffers are useful only if
1.186 +// appending an entire array is cheaper than appending element by element.
1.187 +// This is true for many string representations.
1.188 +// This should perhaps inherit from ostream<sequence::value_type>
1.189 +// and be implemented correspondingly, so that they can be used
1.190 +// for formatted output. For the sake of portability, we don't do this yet.
1.191 +//
1.192 +// For now, sequence buffers behave as output iterators. But they also
1.193 +// behave a little like basic_ostringstream<sequence::value_type> and a
1.194 +// little like containers.
1.195 +
1.196 +template<class _Sequence
1.197 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.198 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.199 +, size_t _Buf_sz = 100
1.200 +# if defined(__sgi) && !defined(__GNUC__)
1.201 +# define __TYPEDEF_WORKAROUND
1.202 +,class _V = typename _Sequence::value_type
1.203 +# endif /* __sgi */
1.204 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.205 +>
1.206 +// The 3rd parameter works around a common compiler bug.
1.207 +class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
1.208 +public:
1.209 +# ifndef __TYPEDEF_WORKAROUND
1.210 + typedef typename _Sequence::value_type value_type;
1.211 + typedef sequence_buffer<_Sequence
1.212 +# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
1.213 + defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
1.214 + , _Buf_sz
1.215 + > _Self;
1.216 +# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.217 + > _Self;
1.218 + enum { _Buf_sz = 100};
1.219 +# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
1.220 + // # endif
1.221 +# else /* __TYPEDEF_WORKAROUND */
1.222 + typedef _V value_type;
1.223 + typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
1.224 +# endif /* __TYPEDEF_WORKAROUND */
1.225 +protected:
1.226 + _Sequence* _M_prefix;
1.227 + value_type _M_buffer[_Buf_sz];
1.228 + size_t _M_buf_count;
1.229 +public:
1.230 + void flush() {
1.231 + _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
1.232 + _M_buf_count = 0;
1.233 + }
1.234 + ~sequence_buffer() { flush(); }
1.235 + sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
1.236 + sequence_buffer(const _Self& __x) {
1.237 + _M_prefix = __x._M_prefix;
1.238 + _M_buf_count = __x._M_buf_count;
1.239 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.240 + }
1.241 + sequence_buffer(_Self& __x) {
1.242 + __x.flush();
1.243 + _M_prefix = __x._M_prefix;
1.244 + _M_buf_count = 0;
1.245 + }
1.246 + sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
1.247 + _Self& operator= (_Self& __x) {
1.248 + __x.flush();
1.249 + _M_prefix = __x._M_prefix;
1.250 + _M_buf_count = 0;
1.251 + return *this;
1.252 + }
1.253 + _Self& operator= (const _Self& __x) {
1.254 + _M_prefix = __x._M_prefix;
1.255 + _M_buf_count = __x._M_buf_count;
1.256 + copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
1.257 + return *this;
1.258 + }
1.259 + void push_back(value_type __x)
1.260 + {
1.261 + if (_M_buf_count < _Buf_sz) {
1.262 + _M_buffer[_M_buf_count] = __x;
1.263 + ++_M_buf_count;
1.264 + } else {
1.265 + flush();
1.266 + _M_buffer[0] = __x;
1.267 + _M_buf_count = 1;
1.268 + }
1.269 + }
1.270 + void append(value_type* __s, size_t __len)
1.271 + {
1.272 + if (__len + _M_buf_count <= _Buf_sz) {
1.273 + size_t __i = _M_buf_count;
1.274 + size_t __j = 0;
1.275 + for (; __j < __len; __i++, __j++) {
1.276 + _M_buffer[__i] = __s[__j];
1.277 + }
1.278 + _M_buf_count += __len;
1.279 + } else if (0 == _M_buf_count) {
1.280 + _M_prefix->append(__s, __s + __len);
1.281 + } else {
1.282 + flush();
1.283 + append(__s, __len);
1.284 + }
1.285 + }
1.286 + _Self& write(value_type* __s, size_t __len)
1.287 + {
1.288 + append(__s, __len);
1.289 + return *this;
1.290 + }
1.291 + _Self& put(value_type __x)
1.292 + {
1.293 + push_back(__x);
1.294 + return *this;
1.295 + }
1.296 + _Self& operator=(const value_type& __rhs)
1.297 + {
1.298 + push_back(__rhs);
1.299 + return *this;
1.300 + }
1.301 + _Self& operator*() { return *this; }
1.302 + _Self& operator++() { return *this; }
1.303 + _Self& operator++(int) { return *this; }
1.304 +};
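// Usage sketch: a sequence_buffer batches single-character output into one
// append() call per buffer-full (100 elements by default), which is much
// cheaper for ropes than appending character by character.
//
//   crope r;
//   sequence_buffer<crope> buf(r);
//   const char* s = "appended through the buffer";
//   copy(s, s + 27, buf);           // buffered writes; flushed in chunks
//   buf.flush();                    // the destructor would also flush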
1.305 +
1.306 +// The following should be treated as private, at least for now.
1.307 +template<class _CharT>
1.308 +class _Rope_char_consumer {
1.309 +public:
1.310 + // If we had member templates, these should not be virtual.
1.311 + // For now we need to use run-time parametrization where
1.312 +  // compile-time would do. Hence this should all be private
1.313 + // for now.
1.314 + // The symmetry with char_producer is accidental and temporary.
1.315 + virtual ~_Rope_char_consumer() {};
1.316 + virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
1.317 +};
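// A hypothetical consumer, for illustration: rope::apply_to_pieces() (below)
// walks the tree and hands each contiguous piece to the consumer, so work is
// done per leaf rather than per character.
//
//   struct count_consumer : public _Rope_char_consumer<char> {
//     size_t _M_count;
//     count_consumer() : _M_count(0) {}
//     virtual bool operator()(const char*, size_t __len)
//       { _M_count += __len; return true; }   // true: keep traversing
//   };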
1.318 +
1.319 +//
1.320 +// What follows should really be local to rope. Unfortunately,
1.321 +// that doesn't work, since it makes it impossible to define generic
1.322 +// equality on rope iterators. According to the draft standard, the
1.323 +// template parameters for such an equality operator cannot be inferred
1.324 +// from the occurrence of a member class as a parameter.
1.325 +// (SGI compilers in fact allow this, but the result wouldn't be
1.326 +// portable.)
1.327 +// Similarly, some of the static member functions are member functions
1.328 +// only to avoid polluting the global namespace, and to circumvent
1.329 +// restrictions on type inference for template functions.
1.330 +//
1.331 +
1.332 +//
1.333 +// The internal data structure for representing a rope. This is
1.334 +// private to the implementation. A rope is really just a pointer
1.335 +// to one of these.
1.336 +//
1.337 +// A few basic functions for manipulating this data structure
1.338 +// are members of _RopeRep. Most of the more complex algorithms
1.339 +// are implemented as rope members.
1.340 +//
1.341 +// Some of the static member functions of _RopeRep have identically
1.342 +// named functions in rope that simply invoke the _RopeRep versions.
1.343 +//
1.344 +// A macro to introduce various allocation and deallocation functions.
1.345 +// These need to be defined differently depending on whether or not
1.346 +// we are using standard conforming allocators, and whether the allocator
1.347 +// instances have real state. Thus this macro is invoked repeatedly
1.348 +// with different definitions of __ROPE_DEFINE_ALLOC.
1.349 +
1.350 +#if defined (_STLP_MEMBER_TEMPLATE_CLASSES)
1.351 +# define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) \
1.352 + typedef typename \
1.353 + _Alloc_traits<_Tp,_Alloc>::allocator_type __name##Allocator;
1.354 +
1.355 +#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy) \
1.356 + __ROPE_DEFINE_ALLOC(_CharT,_Data, _M_proxy) /* character data */ \
1.357 + typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
1.358 + __ROPE_DEFINE_ALLOC(__C,_C, _M_proxy) \
1.359 + typedef _Rope_RopeLeaf<_CharT,__a> __L; \
1.360 + __ROPE_DEFINE_ALLOC(__L,_L, _M_proxy) \
1.361 + typedef _Rope_RopeFunction<_CharT,__a> __F; \
1.362 + __ROPE_DEFINE_ALLOC(__F,_F, _M_proxy) \
1.363 + typedef _Rope_RopeSubstring<_CharT,__a> __S; \
1.364 + __ROPE_DEFINE_ALLOC(__S,_S,_M_proxy)
1.365 +#else
1.366 +#define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy)
1.367 +#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy)
1.368 +#endif
1.369 +
1.370 +
1.371 +template<class _CharT, class _Alloc>
1.372 +struct _Rope_RopeRep
1.373 +# ifndef __GC
1.374 + : public _Refcount_Base
1.375 +# endif
1.376 +{
1.377 + typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
1.378 +public:
1.379 +# define __ROPE_MAX_DEPTH 45
1.380 +# define __ROPE_DEPTH_SIZE 46
1.381 + enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
1.382 + enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
1.383 + // Apparently needed by VC++
1.384 + // The data fields of leaves are allocated with some
1.385 +  // extra space, to accommodate future growth and for basic
1.386 + // character types, to hold a trailing eos character.
1.387 + enum { _S_alloc_granularity = 8 };
1.388 +
1.389 +
1.390 + _Tag _M_tag:8;
1.391 + bool _M_is_balanced:8;
1.392 +
1.393 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.394 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type
1.395 + allocator_type;
1.396 +
1.397 + allocator_type get_allocator() const { return allocator_type(_M_size); }
1.398 +
1.399 + unsigned char _M_depth;
1.400 + __GC_CONST _CharT* _M_c_string;
1.401 + _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
1.402 +
1.403 +# ifdef _STLP_NO_ARROW_OPERATOR
1.404 + _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
1.405 +# endif
1.406 +
1.407 + /* Flattened version of string, if needed. */
1.408 + /* typically 0. */
1.409 + /* If it's not 0, then the memory is owned */
1.410 + /* by this node. */
1.411 + /* In the case of a leaf, this may point to */
1.412 + /* the same memory as the data field. */
1.413 + _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t _p_size,
1.414 + allocator_type __a) :
1.415 +# ifndef __GC
1.416 + _Refcount_Base(1),
1.417 +# endif
1.418 + _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
1.419 + { }
1.420 +# ifdef __GC
1.421 + void _M_incr () {}
1.422 +# endif
1.423 +
1.424 + // fbp : moved from RopeLeaf
1.425 + static size_t _S_rounded_up_size(size_t __n) {
1.426 + size_t __size_with_eos;
1.427 +
1.428 + if (_S_is_basic_char_type((_CharT*)0)) {
1.429 + __size_with_eos = __n + 1;
1.430 + } else {
1.431 + __size_with_eos = __n;
1.432 + }
1.433 +# ifdef __GC
1.434 + return __size_with_eos;
1.435 +# else
1.436 + // Allow slop for in-place expansion.
1.437 + return (__size_with_eos + _S_alloc_granularity-1)
1.438 + &~ (_S_alloc_granularity-1);
1.439 +# endif
1.440 + }
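  // Worked example of the rounding above (non-GC build, granularity 8):
  // for rope<char>, a leaf of 10 characters needs 10 + 1 (eos) = 11 slots,
  // rounded up to 16; 16 characters need 17 slots, rounded up to 24. For
  // non-basic character types no eos slot is added, so 16 elements stay 16.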
1.441 +
1.442 + static void _S_free_string(__GC_CONST _CharT* __s, size_t __len,
1.443 + allocator_type __a) {
1.444 +
1.445 + if (!_S_is_basic_char_type((_CharT*)0)) {
1.446 + _STLP_STD::_Destroy(__s, __s + __len);
1.447 + }
1.448 + // This has to be a static member, so this gets a bit messy
1.449 +# ifdef _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM
1.450 + __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
1.451 +# else
1.452 + __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
1.453 +# endif
1.454 + }
1.455 +
1.456 + // Deallocate data section of a leaf.
1.457 + // This shouldn't be a member function.
1.458 +  // But it's hard to do anything else at the
1.459 + // moment, because it's templatized w.r.t.
1.460 + // an allocator.
1.461 + // Does nothing if __GC is defined.
1.462 +# ifndef __GC
1.463 + void _M_free_c_string();
1.464 + void _M_free_tree();
1.465 + // Deallocate t. Assumes t is not 0.
1.466 + void _M_unref_nonnil()
1.467 + {
1.468 + _M_decr(); if (!_M_ref_count) _M_free_tree();
1.469 + }
1.470 + void _M_ref_nonnil()
1.471 + {
1.472 + _M_incr();
1.473 + }
1.474 + static void _S_unref(_Self* __t)
1.475 + {
1.476 + if (0 != __t) {
1.477 + __t->_M_unref_nonnil();
1.478 + }
1.479 + }
1.480 + static void _S_ref(_Self* __t)
1.481 + {
1.482 + if (0 != __t) __t->_M_incr();
1.483 + }
1.484 + static void _S_free_if_unref(_Self* __t)
1.485 + {
1.486 + if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
1.487 + }
1.488 +# else /* __GC */
1.489 + void _M_unref_nonnil() {}
1.490 + void _M_ref_nonnil() {}
1.491 + static void _S_unref(_Self*) {}
1.492 + static void _S_ref(_Self*) {}
1.493 + static void _S_free_if_unref(_Self*) {}
1.494 +# endif
1.495 +
1.496 + __ROPE_DEFINE_ALLOCS(_Alloc, _M_size)
1.497 + };
1.498 +
1.499 +template<class _CharT, class _Alloc>
1.500 +struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
1.501 +public:
1.502 + __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
1.503 + /* The allocated size is */
1.504 + /* _S_rounded_up_size(size), except */
1.505 + /* in the GC case, in which it */
1.506 + /* doesn't matter. */
1.507 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.508 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.509 + _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t _p_size, allocator_type __a)
1.510 + : _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_leaf, 0, true, _p_size, __a),
1.511 + _M_data(__d)
1.512 + {
1.513 + _STLP_ASSERT(_p_size > 0)
1.514 + if (_S_is_basic_char_type((_CharT *)0)) {
1.515 + // already eos terminated.
1.516 + this->_M_c_string = __d;
1.517 + }
1.518 + }
1.519 +
1.520 +# ifdef _STLP_NO_ARROW_OPERATOR
1.521 + _Rope_RopeLeaf() {}
1.522 + _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
1.523 +# endif
1.524 +
1.525 +  // The constructor assumes that __d has been allocated with
1.526 + // the proper allocator and the properly padded size.
1.527 + // In contrast, the destructor deallocates the data:
1.528 +# ifndef __GC
1.529 + ~_Rope_RopeLeaf() {
1.530 + if (_M_data != this->_M_c_string) {
1.531 + this->_M_free_c_string();
1.532 + }
1.533 + _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
1.534 + }
1.535 +# endif
1.536 +};
1.537 +
1.538 +template<class _CharT, class _Alloc>
1.539 +struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
1.540 +public:
1.541 + _Rope_RopeRep<_CharT,_Alloc>* _M_left;
1.542 + _Rope_RopeRep<_CharT,_Alloc>* _M_right;
1.543 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.544 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.545 + _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
1.546 + _Rope_RopeRep<_CharT,_Alloc>* __r,
1.547 + allocator_type __a)
1.548 + : _Rope_RopeRep<_CharT,_Alloc>(
1.549 + _Rope_RopeRep<_CharT,_Alloc>::_S_concat,
1.550 + (max)(__l->_M_depth, __r->_M_depth) + 1, false,
1.551 + __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
1.552 + {}
1.553 +# ifdef _STLP_NO_ARROW_OPERATOR
1.554 + _Rope_RopeConcatenation() {}
1.555 + _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
1.556 +# endif
1.557 +
1.558 +# ifndef __GC
1.559 + ~_Rope_RopeConcatenation() {
1.560 + this->_M_free_c_string();
1.561 + _M_left->_M_unref_nonnil();
1.562 + _M_right->_M_unref_nonnil();
1.563 + }
1.564 +# endif
1.565 +};
1.566 +
1.567 +template<class _CharT, class _Alloc>
1.568 +struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
1.569 +public:
1.570 + char_producer<_CharT>* _M_fn;
1.571 +# ifndef __GC
1.572 + bool _M_delete_when_done; // Char_producer is owned by the
1.573 + // rope and should be explicitly
1.574 + // deleted when the rope becomes
1.575 + // inaccessible.
1.576 +# else
1.577 + // In the GC case, we either register the rope for
1.578 + // finalization, or not. Thus the field is unnecessary;
1.579 + // the information is stored in the collector data structures.
1.580 + // We do need a finalization procedure to be invoked by the
1.581 + // collector.
1.582 + static void _S_fn_finalization_proc(void * __tree, void *) {
1.583 + delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
1.584 + }
1.585 +# endif
1.586 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.587 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.588 +# ifdef _STLP_NO_ARROW_OPERATOR
1.589 + _Rope_RopeFunction() {}
1.590 + _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
1.591 +# endif
1.592 +
1.593 + _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
1.594 + bool __d, allocator_type __a)
1.595 + :
1.596 + _Rope_RopeRep<_CharT,_Alloc>(_Rope_RopeRep<_CharT,_Alloc>::_S_function, 0, true, _p_size, __a),
1.597 + _M_fn(__f)
1.598 +# ifndef __GC
1.599 + , _M_delete_when_done(__d)
1.600 +# endif
1.601 + {
1.602 + _STLP_ASSERT(_p_size > 0)
1.603 +# ifdef __GC
1.604 + if (__d) {
1.605 + GC_REGISTER_FINALIZER(
1.606 + this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
1.607 + }
1.608 +# endif
1.609 + }
1.610 +# ifndef __GC
1.611 + ~_Rope_RopeFunction() {
1.612 + this->_M_free_c_string();
1.613 + if (_M_delete_when_done) {
1.614 + delete _M_fn;
1.615 + }
1.616 + }
1.617 +# endif
1.618 +};
1.619 +// Substring results are usually represented using just
1.620 +// concatenation nodes. But in the case of very long flat ropes
1.621 +// or ropes with a functional representation, that isn't practical.
1.622 +// In that case, we represent the result as a special case of
1.623 +// RopeFunction, whose char_producer points back to the rope itself.
1.624 +// In all cases except repeated substring operations and
1.625 +// deallocation, we treat the result as a RopeFunction.
1.626 +template<class _CharT, class _Alloc>
1.627 +# if ( defined (__IBMCPP__) && (__IBMCPP__ == 500) ) // JFA 10-Aug-2000 for some reason xlC cares about the order
1.628 +struct _Rope_RopeSubstring : public char_producer<_CharT> , public _Rope_RopeFunction<_CharT,_Alloc>
1.629 +# else
1.630 +struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
1.631 + public char_producer<_CharT>
1.632 +# endif
1.633 +{
1.634 +public:
1.635 + // XXX this whole class should be rewritten.
1.636 + typedef _Rope_RopeRep<_CharT,_Alloc> _Base;
1.637 + _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
1.638 + size_t _M_start;
1.639 + virtual void operator()(size_t __start_pos, size_t __req_len,
1.640 + _CharT* __buffer) {
1.641 + switch(_M_base->_M_tag) {
1.642 + case _Base::_S_function:
1.643 + case _Base::_S_substringfn:
1.644 + {
1.645 + char_producer<_CharT>* __fn =
1.646 + ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
1.647 + _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
1.648 + _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
1.649 + (*__fn)(__start_pos + _M_start, __req_len, __buffer);
1.650 + }
1.651 + break;
1.652 + case _Base::_S_leaf:
1.653 + {
1.654 + __GC_CONST _CharT* __s =
1.655 + ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
1.656 + uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
1.657 + __buffer);
1.658 + }
1.659 + break;
1.660 + default:
1.661 + _STLP_ASSERT(false)
1.662 + ;
1.663 + }
1.664 + }
1.665 +
1.666 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.667 + typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
1.668 +
1.669 + _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1.670 + size_t __l, allocator_type __a)
1.671 + : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
1.672 + _M_base(__b),
1.673 + _M_start(__s)
1.674 +
1.675 + {
1.676 + _STLP_ASSERT(__l > 0)
1.677 + _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
1.678 +# ifndef __GC
1.679 + _M_base->_M_ref_nonnil();
1.680 +# endif
1.681 + this->_M_tag = _Base::_S_substringfn;
1.682 + }
1.683 + virtual ~_Rope_RopeSubstring()
1.684 + {
1.685 +# ifndef __GC
1.686 + _M_base->_M_unref_nonnil();
1.687 +# endif
1.688 + }
1.689 +};
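// Usage sketch for the node above (hedged; the exact size thresholds are an
// implementation detail). substr() on a very long functional or flat rope can
// be answered by a _Rope_RopeSubstring that just remembers the base and an
// offset, instead of copying characters:
//
//   crope big(__my_producer, 1000000, true);   // __my_producer: hypothetical
//   crope middle = big.substr(250000, 500000);
//   // middle's char_producer forwards each request to big, shifted by 250000.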
1.690 +
1.691 +// Self-destructing pointers to Rope_rep.
1.692 +// These are not conventional smart pointers. Their
1.693 +// only purpose in life is to ensure that unref is called
1.694 +// on the pointer either at normal exit or if an exception
1.695 +// is raised. It is the caller's responsibility to
1.696 +// adjust reference counts when these pointers are initialized
1.697 +// or assigned to. (This convention significantly reduces
1.698 +// the number of potentially expensive reference count
1.699 +// updates.)
1.700 +#ifndef __GC
1.701 +template<class _CharT, class _Alloc>
1.702 +struct _Rope_self_destruct_ptr {
1.703 + _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
1.704 + ~_Rope_self_destruct_ptr()
1.705 + { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
1.706 +# ifdef _STLP_USE_EXCEPTIONS
1.707 + _Rope_self_destruct_ptr() : _M_ptr(0) {};
1.708 +# else
1.709 + _Rope_self_destruct_ptr() {};
1.710 +# endif
1.711 + _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
1.712 + _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
1.713 + _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
1.714 + operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
1.715 + _Rope_self_destruct_ptr<_CharT, _Alloc>&
1.716 + operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
1.717 + { _M_ptr = __x; return *this; }
1.718 +};
1.719 +#endif
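// Intended use, sketched (inside rope's own implementation, where freshly
// built _RopeRep results carry a reference count of 1 that nobody owns yet;
// __base here is a hypothetical _RopeRep* already in hand):
//
//   _Rope_self_destruct_ptr<char, allocator<char> > __result(
//       rope<char>::_S_substring(__base, 0, 10));
//   // ... code that may throw ...
//   // __result releases its reference on both normal and exceptional exit.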
1.720 +
1.721 +// Dereferencing a nonconst iterator has to return something
1.722 +// that behaves almost like a reference. It's not possible to
1.723 +// return an actual reference since assignment requires extra
1.724 +// work. And we would get into the same problems as with the
1.725 +// CD2 version of basic_string.
1.726 +template<class _CharT, class _Alloc>
1.727 +class _Rope_char_ref_proxy {
1.728 + typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
1.729 + friend class rope<_CharT,_Alloc>;
1.730 + friend class _Rope_iterator<_CharT,_Alloc>;
1.731 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.732 +# ifdef __GC
1.733 + typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1.734 +# else
1.735 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.736 +# endif
1.737 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.738 + typedef rope<_CharT,_Alloc> _My_rope;
1.739 + size_t _M_pos;
1.740 + _CharT _M_current;
1.741 + bool _M_current_valid;
1.742 + _My_rope* _M_root; // The whole rope.
1.743 +public:
1.744 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
1.745 + _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
1.746 + _Rope_char_ref_proxy(const _Self& __x) :
1.747 + _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
1.748 + // Don't preserve cache if the reference can outlive the
1.749 + // expression. We claim that's not possible without calling
1.750 +  // a copy constructor or generating a reference to a proxy
1.751 + // reference. We declare the latter to have undefined semantics.
1.752 + _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
1.753 + _CharT __c) :
1.754 + _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
1.755 + inline operator _CharT () const;
1.756 + _Self& operator= (_CharT __c);
1.757 + _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
1.758 + _Self& operator= (const _Self& __c) {
1.759 + return operator=((_CharT)__c);
1.760 + }
1.761 +};
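// What the proxy means for callers (sketch):
//
//   rope<char> r("abc");
//   *r.mutable_begin() = 'X';    // _Rope_iterator::operator* returns this proxy;
//                                // assigning through it updates the rope
//   char c = *r.mutable_begin(); // reading uses the operator _CharT() conversion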
1.762 +
1.763 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
1.764 +template<class _CharT, class __Alloc>
1.765 +inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
1.766 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
1.767 + _CharT __tmp = __a;
1.768 + __a = __b;
1.769 + __b = __tmp;
1.770 +}
1.771 +#else
1.772 +// There is no really acceptable way to handle this. The default
1.773 +// definition of swap doesn't work for proxy references.
1.774 +// It can't really be made to work, even with ugly hacks, since
1.775 +// the only unusual operation it uses is the copy constructor, which
1.776 +// is needed for other purposes. We provide a macro for
1.777 +// full specializations, and instantiate the most common case.
1.778 +# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
1.779 + inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
1.780 + _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
1.781 + _CharT __tmp = __a; \
1.782 + __a = __b; \
1.783 + __b = __tmp; \
1.784 + }
1.785 +
1.786 +_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
1.787 +
1.788 +#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
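// On compilers without partial ordering only the char instantiation above is
// provided; a translation unit that needs to swap rope<wchar_t> proxy
// references through generic algorithms would add, e.g. (sketch):
//
//   _ROPE_SWAP_SPECIALIZATION(wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t))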
1.789 +
1.790 + template<class _CharT, class _Alloc>
1.791 +class _Rope_char_ptr_proxy {
1.792 + // XXX this class should be rewritten.
1.793 +public:
1.794 + typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
1.795 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.796 + size_t _M_pos;
1.797 + rope<_CharT,_Alloc>* _M_root; // The whole rope.
1.798 +
1.799 + _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
1.800 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.801 + _Rope_char_ptr_proxy(const _Self& __x)
1.802 + : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
1.803 + _Rope_char_ptr_proxy() {}
1.804 + _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
1.805 + _STLP_ASSERT(0 == __x)
1.806 + }
1.807 + _Self&
1.808 + operator= (const _Self& __x) {
1.809 + _M_pos = __x._M_pos;
1.810 + _M_root = __x._M_root;
1.811 + return *this;
1.812 + }
1.813 +
1.814 + _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
1.815 + return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
1.816 + }
1.817 +};
1.818 +
1.819 +
1.820 +// Rope iterators:
1.821 +// Unlike in the C version, we cache only part of the stack
1.822 +// for rope iterators, since they must be efficiently copyable.
1.823 +// When we run out of cache, we have to reconstruct the iterator
1.824 +// value.
1.825 +// Pointers from iterators are not included in reference counts.
1.826 +// Iterators are assumed to be thread private. Ropes can
1.827 +// be shared.
1.828 +
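// Usage sketch: because only a small path cache is stored, iterators are
// cheap to copy, and sequential traversal refills the cache once per leaf
// rather than once per character.
//
//   const crope r("some long text");
//   size_t blanks = 0;
//   for (crope::const_iterator it = r.begin(); it != r.end(); ++it)
//     if (*it == ' ') ++blanks;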
1.829 +template<class _CharT, class _Alloc>
1.830 +class _Rope_iterator_base
1.831 +/* : public random_access_iterator<_CharT, ptrdiff_t> */
1.832 +{
1.833 + friend class rope<_CharT,_Alloc>;
1.834 + typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
1.835 +public:
1.836 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.837 +  // Borland doesn't want this to be protected.
1.838 + // protected:
1.839 + enum { _S_path_cache_len = 4 }; // Must be <= 9.
1.840 + enum { _S_iterator_buf_len = 15 };
1.841 + size_t _M_current_pos;
1.842 + _RopeRep* _M_root; // The whole rope.
1.843 + size_t _M_leaf_pos; // Starting position for current leaf
1.844 + __GC_CONST _CharT* _M_buf_start;
1.845 + // Buffer possibly
1.846 + // containing current char.
1.847 + __GC_CONST _CharT* _M_buf_ptr;
1.848 + // Pointer to current char in buffer.
1.849 + // != 0 ==> buffer valid.
1.850 + __GC_CONST _CharT* _M_buf_end;
1.851 +  // One past the last valid char in buffer.
1.852 + // What follows is the path cache. We go out of our
1.853 + // way to make this compact.
1.854 + // Path_end contains the bottom section of the path from
1.855 + // the root to the current leaf.
1.856 + const _RopeRep* _M_path_end[_S_path_cache_len];
1.857 +  int _M_leaf_index;  // Last valid position in path_end;
1.858 + // _M_path_end[0] ... _M_path_end[leaf_index-1]
1.859 + // point to concatenation nodes.
1.860 + unsigned char _M_path_directions;
1.861 + // (path_directions >> __i) & 1 is 1
1.862 + // iff we got from _M_path_end[leaf_index - __i - 1]
1.863 + // to _M_path_end[leaf_index - __i] by going to the
1.864 +  // right. Assumes path_cache_len <= 9.
1.865 + _CharT _M_tmp_buf[_S_iterator_buf_len];
1.866 + // Short buffer for surrounding chars.
1.867 + // This is useful primarily for
1.868 + // RopeFunctions. We put the buffer
1.869 + // here to avoid locking in the
1.870 + // multithreaded case.
1.871 + // The cached path is generally assumed to be valid
1.872 + // only if the buffer is valid.
1.873 + static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.874 + // Set buffer contents given
1.875 + // path cache.
1.876 + static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.877 + // Set buffer contents and
1.878 + // path cache.
1.879 + static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
1.880 + // As above, but assumes path
1.881 +  // cache is valid for the previous position.
1.882 + _Rope_iterator_base() {}
1.883 + _Rope_iterator_base(_RopeRep* __root, size_t __pos)
1.884 + : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
1.885 + void _M_incr(size_t __n);
1.886 + void _M_decr(size_t __n);
1.887 +public:
1.888 + size_t index() const { return _M_current_pos; }
1.889 + _Rope_iterator_base(const _Self& __x) {
1.890 + if (0 != __x._M_buf_ptr) {
1.891 + *this = __x;
1.892 + } else {
1.893 + _M_current_pos = __x._M_current_pos;
1.894 + _M_root = __x._M_root;
1.895 + _M_buf_ptr = 0;
1.896 + }
1.897 + }
1.898 +};
1.899 +
1.900 +template<class _CharT, class _Alloc> class _Rope_iterator;
1.901 +
1.902 +template<class _CharT, class _Alloc>
1.903 +class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.904 + friend class rope<_CharT,_Alloc>;
1.905 + typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
1.906 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.907 + // protected:
1.908 +public:
1.909 +# ifndef _STLP_HAS_NO_NAMESPACES
1.910 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.911 + // The one from the base class may not be directly visible.
1.912 +# endif
1.913 + _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
1.914 + _Rope_iterator_base<_CharT,_Alloc>(
1.915 + __CONST_CAST(_RopeRep*,__root), __pos)
1.916 + // Only nonconst iterators modify root ref count
1.917 + {}
1.918 +public:
1.919 + typedef _CharT reference; // Really a value. Returning a reference
1.920 +  // would be a mess, since it would have
1.921 + // to be included in refcount.
1.922 + typedef const _CharT* pointer;
1.923 + typedef _CharT value_type;
1.924 + typedef ptrdiff_t difference_type;
1.925 + typedef random_access_iterator_tag iterator_category;
1.926 +
1.927 +public:
1.928 + _Rope_const_iterator() {};
1.929 + _Rope_const_iterator(const _Self& __x) :
1.930 + _Rope_iterator_base<_CharT,_Alloc>(__x) { }
1.931 + _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
1.932 + _Rope_iterator_base<_CharT,_Alloc>(__x) {}
1.933 + _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
1.934 + _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
1.935 + _Self& operator= (const _Self& __x) {
1.936 + if (0 != __x._M_buf_ptr) {
1.937 + *(__STATIC_CAST(_Base*,this)) = __x;
1.938 + } else {
1.939 + this->_M_current_pos = __x._M_current_pos;
1.940 + this->_M_root = __x._M_root;
1.941 + this->_M_buf_ptr = 0;
1.942 + }
1.943 + return(*this);
1.944 + }
1.945 + reference operator*() {
1.946 + if (0 == this->_M_buf_ptr) _S_setcache(*this);
1.947 + return *(this->_M_buf_ptr);
1.948 + }
1.949 + _Self& operator++() {
1.950 + __GC_CONST _CharT* __next;
1.951 + if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
1.952 + this->_M_buf_ptr = __next;
1.953 + ++this->_M_current_pos;
1.954 + } else {
1.955 + this->_M_incr(1);
1.956 + }
1.957 + return *this;
1.958 + }
1.959 + _Self& operator+=(ptrdiff_t __n) {
1.960 + if (__n >= 0) {
1.961 + this->_M_incr(__n);
1.962 + } else {
1.963 + this->_M_decr(-__n);
1.964 + }
1.965 + return *this;
1.966 + }
1.967 + _Self& operator--() {
1.968 + this->_M_decr(1);
1.969 + return *this;
1.970 + }
1.971 + _Self& operator-=(ptrdiff_t __n) {
1.972 + if (__n >= 0) {
1.973 + this->_M_decr(__n);
1.974 + } else {
1.975 + this->_M_incr(-__n);
1.976 + }
1.977 + return *this;
1.978 + }
1.979 + _Self operator++(int) {
1.980 + size_t __old_pos = this->_M_current_pos;
1.981 + this->_M_incr(1);
1.982 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.983 + // This makes a subsequent dereference expensive.
1.984 + // Perhaps we should instead copy the iterator
1.985 + // if it has a valid cache?
1.986 + }
1.987 + _Self operator--(int) {
1.988 + size_t __old_pos = this->_M_current_pos;
1.989 + this->_M_decr(1);
1.990 + return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1.991 + }
1.992 + inline reference operator[](size_t __n);
1.993 +};
1.994 +
1.995 +template<class _CharT, class _Alloc>
1.996 +class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1.997 + friend class rope<_CharT,_Alloc>;
1.998 + typedef _Rope_iterator<_CharT, _Alloc> _Self;
1.999 + typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
1.1000 + typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1.1001 + // protected:
1.1002 +public:
1.1003 + rope<_CharT,_Alloc>* _M_root_rope;
1.1004 + // root is treated as a cached version of this,
1.1005 + // and is used to detect changes to the underlying
1.1006 + // rope.
1.1007 + // Root is included in the reference count.
1.1008 + // This is necessary so that we can detect changes reliably.
1.1009 + // Unfortunately, it requires careful bookkeeping for the
1.1010 + // nonGC case.
1.1011 + _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
1.1012 +
1.1013 + void _M_check();
1.1014 +public:
1.1015 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.1016 + typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1.1017 + typedef _CharT value_type;
1.1018 + typedef ptrdiff_t difference_type;
1.1019 + typedef random_access_iterator_tag iterator_category;
1.1020 +public:
1.1021 + ~_Rope_iterator() //*TY 5/6/00 - added dtor to balance reference count
1.1022 + {
1.1023 + _RopeRep::_S_unref(this->_M_root);
1.1024 + }
1.1025 +
1.1026 + rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1.1027 + _Rope_iterator() {
1.1028 + this->_M_root = 0; // Needed for reference counting.
1.1029 + };
1.1030 + _Rope_iterator(const _Self& __x) :
1.1031 + _Rope_iterator_base<_CharT,_Alloc>(__x) {
1.1032 + _M_root_rope = __x._M_root_rope;
1.1033 + _RopeRep::_S_ref(this->_M_root);
1.1034 + }
1.1035 + _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1.1036 + _Self& operator= (const _Self& __x) {
1.1037 + _RopeRep* __old = this->_M_root;
1.1038 +
1.1039 + _RopeRep::_S_ref(__x._M_root);
1.1040 + if (0 != __x._M_buf_ptr) {
1.1041 + _M_root_rope = __x._M_root_rope;
1.1042 + *(__STATIC_CAST(_Base*,this)) = __x;
1.1043 + } else {
1.1044 + this->_M_current_pos = __x._M_current_pos;
1.1045 + this->_M_root = __x._M_root;
1.1046 + _M_root_rope = __x._M_root_rope;
1.1047 + this->_M_buf_ptr = 0;
1.1048 + }
1.1049 + _RopeRep::_S_unref(__old);
1.1050 + return(*this);
1.1051 + }
1.1052 + reference operator*() {
1.1053 + _M_check();
1.1054 + if (0 == this->_M_buf_ptr) {
1.1055 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1056 + _M_root_rope, this->_M_current_pos);
1.1057 + } else {
1.1058 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1059 + _M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
1.1060 + }
1.1061 + }
1.1062 + _Self& operator++() {
1.1063 + this->_M_incr(1);
1.1064 + return *this;
1.1065 + }
1.1066 + _Self& operator+=(ptrdiff_t __n) {
1.1067 + if (__n >= 0) {
1.1068 + this->_M_incr(__n);
1.1069 + } else {
1.1070 + this->_M_decr(-__n);
1.1071 + }
1.1072 + return *this;
1.1073 + }
1.1074 + _Self& operator--() {
1.1075 + this->_M_decr(1);
1.1076 + return *this;
1.1077 + }
1.1078 + _Self& operator-=(ptrdiff_t __n) {
1.1079 + if (__n >= 0) {
1.1080 + this->_M_decr(__n);
1.1081 + } else {
1.1082 + this->_M_incr(-__n);
1.1083 + }
1.1084 + return *this;
1.1085 + }
1.1086 + _Self operator++(int) {
1.1087 + size_t __old_pos = this->_M_current_pos;
1.1088 + this->_M_incr(1);
1.1089 + return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1.1090 + }
1.1091 + _Self operator--(int) {
1.1092 + size_t __old_pos = this->_M_current_pos;
1.1093 + this->_M_decr(1);
1.1094 + return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1.1095 + }
1.1096 + reference operator[](ptrdiff_t __n) {
1.1097 + return _Rope_char_ref_proxy<_CharT,_Alloc>(
1.1098 + _M_root_rope, this->_M_current_pos + __n);
1.1099 + }
1.1100 +};
1.1101 +
1.1102 +# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1.1103 +template <class _CharT, class _Alloc>
1.1104 +inline random_access_iterator_tag
1.1105 +iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
1.1106 +template <class _CharT, class _Alloc>
1.1107 +inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1108 +template <class _CharT, class _Alloc>
1.1109 +inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
1.1110 +template <class _CharT, class _Alloc>
1.1111 +inline random_access_iterator_tag
1.1112 +iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
1.1113 +template <class _CharT, class _Alloc>
1.1114 +inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1115 +template <class _CharT, class _Alloc>
1.1116 +inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
1.1117 +#endif
1.1118 +
1.1119 +template <class _CharT, class _Alloc>
1.1120 +class rope {
1.1121 + typedef rope<_CharT,_Alloc> _Self;
1.1122 +public:
1.1123 + typedef _CharT value_type;
1.1124 + typedef ptrdiff_t difference_type;
1.1125 + typedef size_t size_type;
1.1126 + typedef _CharT const_reference;
1.1127 + typedef const _CharT* const_pointer;
1.1128 + typedef _Rope_iterator<_CharT,_Alloc> iterator;
1.1129 + typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1.1130 + typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1.1131 + typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1.1132 +
1.1133 + friend class _Rope_iterator<_CharT,_Alloc>;
1.1134 + friend class _Rope_const_iterator<_CharT,_Alloc>;
1.1135 + friend struct _Rope_RopeRep<_CharT,_Alloc>;
1.1136 + friend class _Rope_iterator_base<_CharT,_Alloc>;
1.1137 + friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1.1138 + friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1.1139 + friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1.1140 +
1.1141 + _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
1.1142 +
1.1143 +protected:
1.1144 + typedef __GC_CONST _CharT* _Cstrptr;
1.1145 +
1.1146 + static _CharT _S_empty_c_str[1];
1.1147 +
1.1148 + static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1.1149 + enum { _S_copy_max = 23 };
1.1150 + // For strings shorter than _S_copy_max, we copy to
1.1151 + // concatenate.
1.1152 +
1.1153 +public:
1.1154 + typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1.1155 + _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
1.1156 + typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
1.1157 + allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
1.1158 +public:
1.1159 + // The only data member of a rope:
1.1160 + _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
1.1161 +
1.1162 + typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1.1163 + typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1.1164 + typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1.1165 + typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1.1166 +
1.1167 +
1.1168 +
1.1169 + // Retrieve a character at the indicated position.
1.1170 + static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1.1171 +
1.1172 +# ifndef __GC
1.1173 + // Obtain a pointer to the character at the indicated position.
1.1174 + // The pointer can be used to change the character.
1.1175 + // If such a pointer cannot be produced, as is frequently the
1.1176 + // case, 0 is returned instead.
1.1177 + // (Returns nonzero only if all nodes in the path have a refcount
1.1178 + // of 1.)
1.1179 + static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1.1180 +# endif
1.1181 +
1.1182 + static bool _S_apply_to_pieces(
1.1183 + // should be template parameter
1.1184 + _Rope_char_consumer<_CharT>& __c,
1.1185 + const _RopeRep* __r,
1.1186 + size_t __begin, size_t __end);
1.1187 + // begin and end are assumed to be in range.
1.1188 +
1.1189 +# ifndef __GC
1.1190 + static void _S_unref(_RopeRep* __t)
1.1191 + {
1.1192 + _RopeRep::_S_unref(__t);
1.1193 + }
1.1194 + static void _S_ref(_RopeRep* __t)
1.1195 + {
1.1196 + _RopeRep::_S_ref(__t);
1.1197 + }
1.1198 +# else /* __GC */
1.1199 + static void _S_unref(_RopeRep*) {}
1.1200 + static void _S_ref(_RopeRep*) {}
1.1201 +# endif
1.1202 +
1.1203 +
1.1204 +# ifdef __GC
1.1205 + typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1.1206 +# else
1.1207 + typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1.1208 +# endif
1.1209 +
1.1210 +  // Result is counted in refcount.
1.1211 + static _RopeRep* _S_substring(_RopeRep* __base,
1.1212 + size_t __start, size_t __endp1);
1.1213 +
1.1214 + static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1.1215 + const _CharT* __iter, size_t __slen);
1.1216 + // Concatenate rope and char ptr, copying __s.
1.1217 + // Should really take an arbitrary iterator.
1.1218 + // Result is counted in refcount.
1.1219 + static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1.1220 + const _CharT* __iter, size_t __slen)
1.1221 + // As above, but one reference to __r is about to be
1.1222 + // destroyed. Thus the pieces may be recycled if all
1.1223 +  // relevant reference counts are 1.
1.1224 +# ifdef __GC
1.1225 + // We can't really do anything since refcounts are unavailable.
1.1226 + { return _S_concat_char_iter(__r, __iter, __slen); }
1.1227 +# else
1.1228 + ;
1.1229 +# endif
1.1230 +
1.1231 + static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
1.1232 +  // General concatenation on _RopeRep. Result
1.1233 + // has refcount of 1. Adjusts argument refcounts.
1.1234 +
1.1235 +public:
1.1236 + void apply_to_pieces( size_t __begin, size_t __end,
1.1237 + _Rope_char_consumer<_CharT>& __c) const {
1.1238 + _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end);
1.1239 + }
1.1240 +
1.1241 +
1.1242 +protected:
1.1243 +
1.1244 + static size_t _S_rounded_up_size(size_t __n) {
1.1245 + return _RopeRep::_S_rounded_up_size(__n);
1.1246 + }
1.1247 +
1.1248 + static size_t _S_allocated_capacity(size_t __n) {
1.1249 + if (_S_is_basic_char_type((_CharT*)0)) {
1.1250 + return _S_rounded_up_size(__n) - 1;
1.1251 + } else {
1.1252 + return _S_rounded_up_size(__n);
1.1253 + }
1.1254 + }
1.1255 +
1.1256 +  // Allocate and construct a RopeLeaf using the supplied allocator.
1.1257 +  // Takes ownership of __s instead of copying.
1.1258 + static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1.1259 + size_t _p_size, allocator_type __a)
1.1260 + {
1.1261 + _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _RopeLeaf).allocate(1,(const void*)0);
1.1262 + _STLP_TRY {
1.1263 + _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
1.1264 + }
1.1265 + _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1266 + _RopeLeaf).deallocate(__space, 1))
1.1267 + return __space;
1.1268 + }
1.1269 +
1.1270 + static _RopeConcatenation* _S_new_RopeConcatenation(
1.1271 + _RopeRep* __left, _RopeRep* __right,
1.1272 + allocator_type __a)
1.1273 + {
1.1274 + _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1275 + _RopeConcatenation).allocate(1,(const void*)0);
1.1276 + return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
1.1277 + }
1.1278 +
1.1279 + static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1.1280 + size_t _p_size, bool __d, allocator_type __a)
1.1281 + {
1.1282 + _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1283 + _RopeFunction).allocate(1,(const void*)0);
1.1284 + return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
1.1285 + }
1.1286 +
1.1287 + static _RopeSubstring* _S_new_RopeSubstring(
1.1288 + _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1.1289 + size_t __l, allocator_type __a)
1.1290 + {
1.1291 + _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type,__a,
1.1292 + _RopeSubstring).allocate(1,(const void*)0);
1.1293 + return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
1.1294 + }
1.1295 +
1.1296 +# define _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _p_size, __a) \
1.1297 + _S_RopeLeaf_from_unowned_char_ptr(__s, _p_size, __a)
1.1298 +
1.1299 + static
1.1300 + _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1.1301 + size_t _p_size, allocator_type __a)
1.1302 + {
1.1303 + if (0 == _p_size) return 0;
1.1304 +
1.1305 + _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
1.1306 +
1.1307 + uninitialized_copy_n(__s, _p_size, __buf);
1.1308 + _S_cond_store_eos(__buf[_p_size]);
1.1309 +
1.1310 + _STLP_TRY {
1.1311 + return _S_new_RopeLeaf(__buf, _p_size, __a);
1.1312 + }
1.1313 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
1.1314 +
1.1315 +# if defined (_STLP_THROW_RETURN_BUG)
1.1316 + return 0;
1.1317 +# endif
1.1318 + }
1.1319 +
1.1320 +
1.1321 + // Concatenation of nonempty strings.
1.1322 + // Always builds a concatenation node.
1.1323 + // Rebalances if the result is too deep.
1.1324 + // Result has refcount 1.
1.1325 + // Does not increment left and right ref counts even though
1.1326 + // they are referenced.
1.1327 + static _RopeRep*
1.1328 + _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1.1329 +
1.1330 + // Concatenation helper functions
1.1331 + static _RopeLeaf*
1.1332 + _S_leaf_concat_char_iter(_RopeLeaf* __r,
1.1333 + const _CharT* __iter, size_t __slen);
1.1334 + // Concatenate by copying leaf.
1.1335 + // should take an arbitrary iterator
1.1336 + // result has refcount 1.
1.1337 +# ifndef __GC
1.1338 + static _RopeLeaf* _S_destr_leaf_concat_char_iter
1.1339 + (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1.1340 + // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1.1341 +# endif
1.1342 +
1.1343 +
1.1344 + // A helper function for exponentiating strings.
1.1345 + // This uses a nonstandard refcount convention.
1.1346 + // The result has refcount 0.
1.1347 + friend struct _Rope_Concat_fn<_CharT,_Alloc>;
1.1348 + typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1349 +
1.1350 +public:
1.1351 + static size_t _S_char_ptr_len(const _CharT* __s) {
1.1352 + const _CharT* __p = __s;
1.1353 +
1.1354 + while (!_S_is0(*__p)) { ++__p; }
1.1355 + return (__p - __s);
1.1356 + }
1.1357 +
1.1358 +public: /* for operators */
1.1359 + rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1.1360 + : _M_tree_ptr(__a, __t) { }
1.1361 +private:
1.1362 + // Copy __r to the _CharT buffer.
1.1363 + // Returns __buffer + __r->_M_size._M_data.
1.1364 + // Assumes that buffer is uninitialized.
1.1365 + static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1.1366 +
1.1367 + // Again, with explicit starting position and length.
1.1368 + // Assumes that buffer is uninitialized.
1.1369 + static _CharT* _S_flatten(_RopeRep* __r,
1.1370 + size_t __start, size_t __len,
1.1371 + _CharT* __buffer);
1.1372 +
1.1373 + // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
1.1374 +public:
1.1375 + static const unsigned long _S_min_len[46];
1.1376 +protected:
1.1377 + static bool _S_is_balanced(_RopeRep* __r)
1.1378 + { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
1.1379 +
1.1380 + static bool _S_is_almost_balanced(_RopeRep* __r)
1.1381 + { return (__r->_M_depth == 0 ||
1.1382 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]); }
1.1383 +
1.1384 + static bool _S_is_roughly_balanced(_RopeRep* __r)
1.1385 + { return (__r->_M_depth <= 1 ||
1.1386 + __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]); }
1.1387 +
1.1388 + // Assumes the result is not empty.
1.1389 + static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1.1390 + _RopeRep* __right)
1.1391 + {
1.1392 + _RopeRep* __result = _S_concat_rep(__left, __right);
1.1393 + if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1.1394 + return __result;
1.1395 + }
1.1396 +
1.1397 + // The basic rebalancing operation. Logically copies the
1.1398 + // rope. The result has refcount of 1. The client will
1.1399 + // usually decrement the reference count of __r.
1.1400 + // The result is within height 2 of balanced by the above
1.1401 + // definition.
1.1402 + static _RopeRep* _S_balance(_RopeRep* __r);
1.1403 +
1.1404 +  // Add all unbalanced subtrees to the forest of balanced trees.
1.1405 + // Used only by balance.
1.1406 + static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1.1407 +
1.1408 + // Add __r to forest, assuming __r is already balanced.
1.1409 + static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1.1410 +
1.1411 + // Print to stdout, exposing structure
1.1412 + static void _S_dump(_RopeRep* __r, int __indent = 0);
1.1413 +
1.1414 + // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1.1415 + static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1.1416 +
1.1417 +public:
1.1418 + bool empty() const { return 0 == _M_tree_ptr._M_data; }
1.1419 +
1.1420 + // Comparison member function. This is public only for those
1.1421 + // clients that need a ternary comparison. Others
1.1422 + // should use the comparison operators below.
1.1423 + int compare(const _Self& __y) const {
1.1424 + return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1.1425 + }
1.1426 +
1.1427 + rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1.1428 + : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),__a))
1.1429 + { }
1.1430 +
1.1431 + rope(const _CharT* __s, size_t __len,
1.1432 + const allocator_type& __a = allocator_type())
1.1433 + : _M_tree_ptr(__a, (_STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a)))
1.1434 + { }
1.1435 +
1.1436 + // Should perhaps be templatized with respect to the iterator type
1.1437 + // and use Sequence_buffer. (It should perhaps use sequence_buffer
1.1438 + // even now.)
1.1439 + rope(const _CharT *__s, const _CharT *__e,
1.1440 + const allocator_type& __a = allocator_type())
1.1441 + : _M_tree_ptr(__a, _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a))
1.1442 + { }
1.1443 +
1.1444 + rope(const const_iterator& __s, const const_iterator& __e,
1.1445 + const allocator_type& __a = allocator_type())
1.1446 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1447 + __e._M_current_pos))
1.1448 + { }
1.1449 +
1.1450 + rope(const iterator& __s, const iterator& __e,
1.1451 + const allocator_type& __a = allocator_type())
1.1452 + : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
1.1453 + __e._M_current_pos))
1.1454 + { }
1.1455 +
1.1456 + rope(_CharT __c, const allocator_type& __a = allocator_type())
1.1457 + : _M_tree_ptr(__a, (_RopeRep*)0)
1.1458 + {
1.1459 + _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
1.1460 +
1.1461 + _Construct(__buf, __c);
1.1462 + _STLP_TRY {
1.1463 + _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
1.1464 + }
1.1465 + _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
1.1466 + }
1.1467 +
1.1468 + rope(size_t __n, _CharT __c,
1.1469 + const allocator_type& __a = allocator_type()):
1.1470 + _M_tree_ptr(__a, (_RopeRep*)0) {
1.1471 + rope<_CharT,_Alloc> __result;
1.1472 +# define __exponentiate_threshold size_t(32)
1.1473 + _RopeRep* __remainder;
1.1474 + rope<_CharT,_Alloc> __remainder_rope;
1.1475 +
1.1476 + // Workaround for gcc-2.7.2 bugs.
1.1477 + typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
1.1478 +
1.1479 + if (0 == __n)
1.1480 + return;
1.1481 +
1.1482 + size_t __exponent = __n / __exponentiate_threshold;
1.1483 + size_t __rest = __n % __exponentiate_threshold;
1.1484 + if (0 == __rest) {
1.1485 + __remainder = 0;
1.1486 + } else {
1.1487 + _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
1.1488 + uninitialized_fill_n(__rest_buffer, __rest, __c);
1.1489 + _S_cond_store_eos(__rest_buffer[__rest]);
1.1490 + _STLP_TRY {
1.1491 + __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
1.1492 + }
1.1493 + _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
1.1494 + }
1.1495 + __remainder_rope._M_tree_ptr._M_data = __remainder;
1.1496 + if (__exponent != 0) {
1.1497 + _CharT* __base_buffer =
1.1498 + _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
1.1499 + _RopeLeaf* __base_leaf;
1.1500 + rope<_CharT,_Alloc> __base_rope;
1.1501 + uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
1.1502 + _S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
1.1503 + _STLP_TRY {
1.1504 + __base_leaf = _S_new_RopeLeaf(__base_buffer,
1.1505 + __exponentiate_threshold, __a);
1.1506 + }
1.1507 + _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
1.1508 + __exponentiate_threshold, __a))
1.1509 + __base_rope._M_tree_ptr._M_data = __base_leaf;
1.1510 + if (1 == __exponent) {
1.1511 + __result = __base_rope;
1.1512 +# ifndef __GC
1.1513 + _STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1.1514 + // One reference each for __base_rope and __result
1.1515 +# endif
1.1516 + } else {
1.1517 + __result = power(__base_rope, __exponent, _Concat_fn());
1.1518 + }
1.1519 + if (0 != __remainder) {
1.1520 + __result += __remainder_rope;
1.1521 + }
1.1522 + } else {
1.1523 + __result = __remainder_rope;
1.1524 + }
1.1525 + _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
1.1526 + _M_tree_ptr._M_data->_M_ref_nonnil();
1.1527 +# undef __exponentiate_threshold
1.1528 + }
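+
+  // Rough illustration of the fill constructor above (names are examples):
+  // it builds one 32-character leaf, raises it to a power by repeated
+  // concatenation, and tacks a short remainder leaf onto the end.
+  //   crope __stars(100, '*');            // 3 x 32-char "power" part + 4-char rest
+  //   _STLP_ASSERT(__stars.size() == 100)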
1.1529 +
1.1530 + rope(const allocator_type& __a = allocator_type())
1.1531 + : _M_tree_ptr(__a, (_RopeRep*)0) {}
1.1532 +
1.1533 + // Construct a rope from a function that can compute its members
1.1534 + rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1.1535 + const allocator_type& __a = allocator_type())
1.1536 + : _M_tree_ptr(__a, (_RopeRep*)0)
1.1537 + {
1.1538 + _M_tree_ptr._M_data = (0 == __len) ?
1.1539 + 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1.1540 + }
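+
+  // Sketch of the char_producer constructor (assumes the char_producer
+  // interface declared earlier in this header: a virtual
+  // operator()(size_t __pos, size_t __len, _CharT* __buf) that fills __buf
+  // with __len characters starting at __pos; the class below is only an
+  // example, not part of this header).
+  //   struct _Zeros : public char_producer<char> {
+  //     virtual void operator()(size_t, size_t __len, char* __buf)
+  //       { memset(__buf, '0', __len); }
+  //   };
+  //   crope __lazy(new _Zeros, 1000000, true);   // characters computed on demand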
1.1541 +
1.1542 + rope(const _Self& __x)
1.1543 + : _M_tree_ptr(__x.get_allocator(), __x._M_tree_ptr._M_data)
1.1544 + {
1.1545 + _S_ref(_M_tree_ptr._M_data);
1.1546 + }
1.1547 +
1.1548 + ~rope()
1.1549 + {
1.1550 + _S_unref(_M_tree_ptr._M_data);
1.1551 + }
1.1552 +
1.1553 + _Self& operator=(const _Self& __x)
1.1554 + {
1.1555 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1556 + _STLP_ASSERT(get_allocator() == __x.get_allocator())
1.1557 + _M_tree_ptr._M_data = __x._M_tree_ptr._M_data;
1.1558 + _S_ref(_M_tree_ptr._M_data);
1.1559 + _S_unref(__old);
1.1560 + return(*this);
1.1561 + }
1.1562 + void clear()
1.1563 + {
1.1564 + _S_unref(_M_tree_ptr._M_data);
1.1565 + _M_tree_ptr._M_data = 0;
1.1566 + }
1.1567 + void push_back(_CharT __x)
1.1568 + {
1.1569 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1570 + _M_tree_ptr._M_data = _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1);
1.1571 + _S_unref(__old);
1.1572 + }
1.1573 +
1.1574 + void pop_back()
1.1575 + {
1.1576 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1577 + _M_tree_ptr._M_data =
1.1578 + _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1579 + _S_unref(__old);
1.1580 + }
1.1581 +
1.1582 + _CharT back() const
1.1583 + {
1.1584 + return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
1.1585 + }
1.1586 +
1.1587 + void push_front(_CharT __x)
1.1588 + {
1.1589 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1590 + _RopeRep* __left =
1.1591 + _STLP_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1.1592 + _STLP_TRY {
1.1593 + _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
1.1594 + _S_unref(__old);
1.1595 + _S_unref(__left);
1.1596 + }
1.1597 + _STLP_UNWIND(_S_unref(__left))
1.1598 + }
1.1599 +
1.1600 + void pop_front()
1.1601 + {
1.1602 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1603 + _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
1.1604 + _S_unref(__old);
1.1605 + }
1.1606 +
1.1607 + _CharT front() const
1.1608 + {
1.1609 + return _S_fetch(_M_tree_ptr._M_data, 0);
1.1610 + }
1.1611 +
1.1612 + void balance()
1.1613 + {
1.1614 + _RopeRep* __old = _M_tree_ptr._M_data;
1.1615 + _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
1.1616 + _S_unref(__old);
1.1617 + }
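+
+  // Usage note (illustrative): a long sequence of small appends can leave a
+  // deep tree; an explicit balance() restores the depth bound described by
+  // _S_is_balanced above.
+  //   crope __r;
+  //   for (int __i = 0; __i < 100000; ++__i) __r.push_back('x');
+  //   __r.balance();   // logically copies into a roughly balanced tree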
1.1618 +
1.1619 + void copy(_CharT* __buffer) const {
1.1620 + _STLP_STD::_Destroy(__buffer, __buffer + size());
1.1621 + _S_flatten(_M_tree_ptr._M_data, __buffer);
1.1622 + }
1.1623 +
1.1624 + // This is the copy function from the standard, but
1.1625 + // with the arguments reordered to make it consistent with the
1.1626 + // rest of the interface.
1.1627 + // Note that this is guaranteed not to compile if the draft-standard
1.1628 + // argument order is assumed.
1.1629 + size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1.1630 + {
1.1631 + size_t _p_size = size();
1.1632 + size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
1.1633 +
1.1634 + _STLP_STD::_Destroy(__buffer, __buffer + __len);
1.1635 + _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
1.1636 + return __len;
1.1637 + }
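+
+  // Example (buffer and names are illustrative): copy() writes into
+  // caller-owned storage and returns the number of characters copied,
+  // clamped so it never reads past the end of the rope.
+  //   crope __r("abcdef");
+  //   char  __buf[4];
+  //   size_t __got = __r.copy(1, 4, __buf);   // __buf holds 'b','c','d','e'; __got == 4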
1.1638 +
1.1639 + // Print to stdout, exposing structure. May be useful for
1.1640 + // performance debugging.
1.1641 + void dump() {
1.1642 + _S_dump(_M_tree_ptr._M_data);
1.1643 + }
1.1644 +
1.1645 + // Convert to a 0-terminated string in newly allocated memory.
1.1646 + // Embedded 0s in the input do not terminate the copy.
1.1647 + const _CharT* c_str() const;
1.1648 +
1.1649 + // As above, but also use the flattened representation as
1.1650 + // the new rope representation.
1.1651 + const _CharT* replace_with_c_str();
1.1652 +
1.1653 + // Reclaim memory for the flattened string generated by c_str.
1.1654 + // Intentionally undocumented, since it's hard to say when this
1.1655 + // is safe for multiple threads.
1.1656 + void delete_c_str () {
1.1657 + if (0 == _M_tree_ptr._M_data) return;
1.1658 + if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
1.1659 + ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
1.1660 + _M_tree_ptr._M_data->_M_c_string) {
1.1661 + // Representation shared
1.1662 + return;
1.1663 + }
1.1664 +# ifndef __GC
1.1665 + _M_tree_ptr._M_data->_M_free_c_string();
1.1666 +# endif
1.1667 + _M_tree_ptr._M_data->_M_c_string = 0;
1.1668 + }
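+
+  // Sketch of the c_str()/delete_c_str() protocol (names are examples):
+  //   crope __r("abc");
+  //   const char* __p = __r.c_str();   // flattened, 0-terminated, cached by the rope
+  //   /* ... use __p ... */
+  //   __r.delete_c_str();              // releases the cache; __p becomes invalid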
1.1669 +
1.1670 + _CharT operator[] (size_type __pos) const {
1.1671 + return _S_fetch(_M_tree_ptr._M_data, __pos);
1.1672 + }
1.1673 +
1.1674 + _CharT at(size_type __pos) const {
1.1675 + // if (__pos >= size()) throw out_of_range; // XXX
1.1676 + return (*this)[__pos];
1.1677 + }
1.1678 +
1.1679 + const_iterator begin() const {
1.1680 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1681 + }
1.1682 +
1.1683 + // An easy way to get a const iterator from a non-const container.
1.1684 + const_iterator const_begin() const {
1.1685 + return(const_iterator(_M_tree_ptr._M_data, 0));
1.1686 + }
1.1687 +
1.1688 + const_iterator end() const {
1.1689 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1690 + }
1.1691 +
1.1692 + const_iterator const_end() const {
1.1693 + return(const_iterator(_M_tree_ptr._M_data, size()));
1.1694 + }
1.1695 +
1.1696 + size_type size() const {
1.1697 + return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
1.1698 + }
1.1699 +
1.1700 + size_type length() const {
1.1701 + return size();
1.1702 + }
1.1703 +
1.1704 + size_type max_size() const {
1.1705 + return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
1.1706 + // Guarantees that the result can be sufficiently
1.1707 + // balanced. Longer ropes will probably still work,
1.1708 + // but it's harder to make guarantees.
1.1709 + }
1.1710 +
1.1711 + const_reverse_iterator rbegin() const {
1.1712 + return const_reverse_iterator(end());
1.1713 + }
1.1714 +
1.1715 + const_reverse_iterator const_rbegin() const {
1.1716 + return const_reverse_iterator(end());
1.1717 + }
1.1718 +
1.1719 + const_reverse_iterator rend() const {
1.1720 + return const_reverse_iterator(begin());
1.1721 + }
1.1722 +
1.1723 + const_reverse_iterator const_rend() const {
1.1724 + return const_reverse_iterator(begin());
1.1725 + }
1.1726 + // The symmetric cases are intentionally omitted, since they're presumed
1.1727 + // to be less common, and we don't handle them as well.
1.1728 +
1.1729 + // The following should really be templatized.
1.1730 + // The first argument should be an input iterator or
1.1731 + // forward iterator with value_type _CharT.
1.1732 + _Self& append(const _CharT* __iter, size_t __n) {
1.1733 + _RopeRep* __result =
1.1734 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n);
1.1735 + _S_unref(_M_tree_ptr._M_data);
1.1736 + _M_tree_ptr._M_data = __result;
1.1737 + return *this;
1.1738 + }
1.1739 +
1.1740 + _Self& append(const _CharT* __c_string) {
1.1741 + size_t __len = _S_char_ptr_len(__c_string);
1.1742 + append(__c_string, __len);
1.1743 + return(*this);
1.1744 + }
1.1745 +
1.1746 + _Self& append(const _CharT* __s, const _CharT* __e) {
1.1747 + _RopeRep* __result =
1.1748 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s);
1.1749 + _S_unref(_M_tree_ptr._M_data);
1.1750 + _M_tree_ptr._M_data = __result;
1.1751 + return *this;
1.1752 + }
1.1753 +
1.1754 + _Self& append(const_iterator __s, const_iterator __e) {
1.1755 + _STLP_ASSERT(__s._M_root == __e._M_root)
1.1756 + _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
1.1757 + _Self_destruct_ptr __appendee(_S_substring(
1.1758 + __s._M_root, __s._M_current_pos, __e._M_current_pos));
1.1759 + _RopeRep* __result =
1.1760 + _S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee);
1.1761 + _S_unref(_M_tree_ptr._M_data);
1.1762 + _M_tree_ptr._M_data = __result;
1.1763 + return *this;
1.1764 + }
1.1765 +
1.1766 + _Self& append(_CharT __c) {
1.1767 + _RopeRep* __result =
1.1768 + _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1);
1.1769 + _S_unref(_M_tree_ptr._M_data);
1.1770 + _M_tree_ptr._M_data = __result;
1.1771 + return *this;
1.1772 + }
1.1773 +
1.1774 + _Self& append() { return append(_CharT()); } // XXX why?
1.1775 +
1.1776 + _Self& append(const _Self& __y) {
1.1777 + _STLP_ASSERT(__y.get_allocator() == get_allocator())
1.1778 + _RopeRep* __result = _S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
1.1779 + _S_unref(_M_tree_ptr._M_data);
1.1780 + _M_tree_ptr._M_data = __result;
1.1781 + return *this;
1.1782 + }
1.1783 +
1.1784 + _Self& append(size_t __n, _CharT __c) {
1.1785 + rope<_CharT,_Alloc> __last(__n, __c);
1.1786 + return append(__last);
1.1787 + }
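+
+  // Example of the append overloads (names are illustrative; all operands
+  // must share the rope's allocator):
+  //   crope __r("foo"), __s("baz");
+  //   __r.append("bar");    // C string
+  //   __r.append(2, '!');   // __n copies of a character
+  //   __r.append(__s);      // another rope; shares structure, no character copying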
1.1788 +
1.1789 + void swap(_Self& __b) {
1.1790 + _STLP_ASSERT(get_allocator() == __b.get_allocator())
1.1791 + _RopeRep* __tmp = _M_tree_ptr._M_data;
1.1792 + _M_tree_ptr._M_data = __b._M_tree_ptr._M_data;
1.1793 + __b._M_tree_ptr._M_data = __tmp;
1.1794 + }
1.1795 +
1.1796 +
1.1797 +protected:
1.1798 + // Result is included in refcount.
1.1799 + static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1.1800 + size_t __pos2, _RopeRep* __r) {
1.1801 + if (0 == __old) { _S_ref(__r); return __r; }
1.1802 + _Self_destruct_ptr __left(
1.1803 + _S_substring(__old, 0, __pos1));
1.1804 + _Self_destruct_ptr __right(
1.1805 + _S_substring(__old, __pos2, __old->_M_size._M_data));
1.1806 + _STLP_MPWFIX_TRY //*TY 06/01/2000 -
1.1807 + _RopeRep* __result;
1.1808 +
1.1809 + if (0 == __r) {
1.1810 + __result = _S_concat_rep(__left, __right);
1.1811 + } else {
1.1812 + _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
1.1813 + _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
1.1814 + __result = _S_concat_rep(__left_result, __right);
1.1815 + }
1.1816 + return __result;
1.1817 + _STLP_MPWFIX_CATCH //*TY 06/01/2000 -
1.1818 + }
1.1819 +
1.1820 +public:
1.1821 + void insert(size_t __p, const _Self& __r) {
1.1822 + _RopeRep* __result =
1.1823 + replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data);
1.1824 + _STLP_ASSERT(get_allocator() == __r.get_allocator())
1.1825 + _S_unref(_M_tree_ptr._M_data);
1.1826 + _M_tree_ptr._M_data = __result;
1.1827 + }
1.1828 +
1.1829 + void insert(size_t __p, size_t __n, _CharT __c) {
1.1830 + rope<_CharT,_Alloc> __r(__n,__c);
1.1831 + insert(__p, __r);
1.1832 + }
1.1833 +
1.1834 + void insert(size_t __p, const _CharT* __i, size_t __n) {
1.1835 + _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
1.1836 + _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
1.1837 + _Self_destruct_ptr __left_result(
1.1838 + _S_concat_char_iter(__left, __i, __n));
1.1839 + // _S_destr_concat_char_iter should be safe here.
1.1840 + // But as it stands it's probably not a win, since __left
1.1841 + // is likely to have additional references.
1.1842 + _RopeRep* __result = _S_concat_rep(__left_result, __right);
1.1843 + _S_unref(_M_tree_ptr._M_data);
1.1844 + _M_tree_ptr._M_data = __result;
1.1845 + }
1.1846 +
1.1847 + void insert(size_t __p, const _CharT* __c_string) {
1.1848 + insert(__p, __c_string, _S_char_ptr_len(__c_string));
1.1849 + }
1.1850 +
1.1851 + void insert(size_t __p, _CharT __c) {
1.1852 + insert(__p, &__c, 1);
1.1853 + }
1.1854 +
1.1855 + void insert(size_t __p) {
1.1856 + _CharT __c = _CharT();
1.1857 + insert(__p, &__c, 1);
1.1858 + }
1.1859 +
1.1860 + void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1861 + _Self __r(__i, __j);
1.1862 + insert(__p, __r);
1.1863 + }
1.1864 +
1.1865 + void insert(size_t __p, const const_iterator& __i,
1.1866 + const const_iterator& __j) {
1.1867 + _Self __r(__i, __j);
1.1868 + insert(__p, __r);
1.1869 + }
1.1870 +
1.1871 + void insert(size_t __p, const iterator& __i,
1.1872 + const iterator& __j) {
1.1873 + _Self __r(__i, __j);
1.1874 + insert(__p, __r);
1.1875 + }
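+
+  // Example of the position-based inserts (names are illustrative); the
+  // iterator-based overloads further below simply forward through index().
+  //   crope __r("hello world");
+  //   __r.insert(5, ",");      // "hello, world"
+  //   __r.insert(0, 2, '*');   // "**hello, world"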
1.1876 +
1.1877 + // (position, length) versions of replace operations:
1.1878 +
1.1879 + void replace(size_t __p, size_t __n, const _Self& __r) {
1.1880 + _RopeRep* __result =
1.1881 + replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data);
1.1882 + _S_unref(_M_tree_ptr._M_data);
1.1883 + _M_tree_ptr._M_data = __result;
1.1884 + }
1.1885 +
1.1886 + void replace(size_t __p, size_t __n,
1.1887 + const _CharT* __i, size_t __i_len) {
1.1888 + _Self __r(__i, __i_len);
1.1889 + replace(__p, __n, __r);
1.1890 + }
1.1891 +
1.1892 + void replace(size_t __p, size_t __n, _CharT __c) {
1.1893 + _Self __r(__c);
1.1894 + replace(__p, __n, __r);
1.1895 + }
1.1896 +
1.1897 + void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1.1898 + _Self __r(__c_string);
1.1899 + replace(__p, __n, __r);
1.1900 + }
1.1901 +
1.1902 + void replace(size_t __p, size_t __n,
1.1903 + const _CharT* __i, const _CharT* __j) {
1.1904 + _Self __r(__i, __j);
1.1905 + replace(__p, __n, __r);
1.1906 + }
1.1907 +
1.1908 + void replace(size_t __p, size_t __n,
1.1909 + const const_iterator& __i, const const_iterator& __j) {
1.1910 + _Self __r(__i, __j);
1.1911 + replace(__p, __n, __r);
1.1912 + }
1.1913 +
1.1914 + void replace(size_t __p, size_t __n,
1.1915 + const iterator& __i, const iterator& __j) {
1.1916 + _Self __r(__i, __j);
1.1917 + replace(__p, __n, __r);
1.1918 + }
1.1919 +
1.1920 + // Single character variants:
1.1921 + void replace(size_t __p, _CharT __c) {
1.1922 + iterator __i(this, __p);
1.1923 + *__i = __c;
1.1924 + }
1.1925 +
1.1926 + void replace(size_t __p, const _Self& __r) {
1.1927 + replace(__p, 1, __r);
1.1928 + }
1.1929 +
1.1930 + void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1.1931 + replace(__p, 1, __i, __i_len);
1.1932 + }
1.1933 +
1.1934 + void replace(size_t __p, const _CharT* __c_string) {
1.1935 + replace(__p, 1, __c_string);
1.1936 + }
1.1937 +
1.1938 + void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1.1939 + replace(__p, 1, __i, __j);
1.1940 + }
1.1941 +
1.1942 + void replace(size_t __p, const const_iterator& __i,
1.1943 + const const_iterator& __j) {
1.1944 + replace(__p, 1, __i, __j);
1.1945 + }
1.1946 +
1.1947 + void replace(size_t __p, const iterator& __i,
1.1948 + const iterator& __j) {
1.1949 + replace(__p, 1, __i, __j);
1.1950 + }
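+
+  // Example of replace (names are illustrative): the (position, length)
+  // forms splice in a new piece; the single-character forms overwrite the
+  // character at the given position.
+  //   crope __r("hello world");
+  //   __r.replace(0, 5, "howdy");   // "howdy world"
+  //   __r.replace(5, '_');          // "howdy_world"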
1.1951 +
1.1952 + // Erase, (position, size) variant.
1.1953 + void erase(size_t __p, size_t __n) {
1.1954 + _RopeRep* __result = replace(_M_tree_ptr._M_data, __p, __p + __n, 0);
1.1955 + _S_unref(_M_tree_ptr._M_data);
1.1956 + _M_tree_ptr._M_data = __result;
1.1957 + }
1.1958 +
1.1959 + // Erase, single character
1.1960 + void erase(size_t __p) {
1.1961 + erase(__p, 1);  // the second argument is a count, not an end position
1.1962 + }
1.1963 +
1.1964 + // Insert, iterator variants.
1.1965 + iterator insert(const iterator& __p, const _Self& __r)
1.1966 + { insert(__p.index(), __r); return __p; }
1.1967 + iterator insert(const iterator& __p, size_t __n, _CharT __c)
1.1968 + { insert(__p.index(), __n, __c); return __p; }
1.1969 + iterator insert(const iterator& __p, _CharT __c)
1.1970 + { insert(__p.index(), __c); return __p; }
1.1971 + iterator insert(const iterator& __p )
1.1972 + { insert(__p.index()); return __p; }
1.1973 + iterator insert(const iterator& __p, const _CharT* c_string)
1.1974 + { insert(__p.index(), c_string); return __p; }
1.1975 + iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1.1976 + { insert(__p.index(), __i, __n); return __p; }
1.1977 + iterator insert(const iterator& __p, const _CharT* __i,
1.1978 + const _CharT* __j)
1.1979 + { insert(__p.index(), __i, __j); return __p; }
1.1980 + iterator insert(const iterator& __p,
1.1981 + const const_iterator& __i, const const_iterator& __j)
1.1982 + { insert(__p.index(), __i, __j); return __p; }
1.1983 + iterator insert(const iterator& __p,
1.1984 + const iterator& __i, const iterator& __j)
1.1985 + { insert(__p.index(), __i, __j); return __p; }
1.1986 +
1.1987 + // Replace, range variants.
1.1988 + void replace(const iterator& __p, const iterator& __q,
1.1989 + const _Self& __r)
1.1990 + { replace(__p.index(), __q.index() - __p.index(), __r); }
1.1991 + void replace(const iterator& __p, const iterator& __q, _CharT __c)
1.1992 + { replace(__p.index(), __q.index() - __p.index(), __c); }
1.1993 + void replace(const iterator& __p, const iterator& __q,
1.1994 + const _CharT* __c_string)
1.1995 + { replace(__p.index(), __q.index() - __p.index(), __c_string); }
1.1996 + void replace(const iterator& __p, const iterator& __q,
1.1997 + const _CharT* __i, size_t __n)
1.1998 + { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
1.1999 + void replace(const iterator& __p, const iterator& __q,
1.2000 + const _CharT* __i, const _CharT* __j)
1.2001 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2002 + void replace(const iterator& __p, const iterator& __q,
1.2003 + const const_iterator& __i, const const_iterator& __j)
1.2004 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2005 + void replace(const iterator& __p, const iterator& __q,
1.2006 + const iterator& __i, const iterator& __j)
1.2007 + { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
1.2008 +
1.2009 + // Replace, iterator variants.
1.2010 + void replace(const iterator& __p, const _Self& __r)
1.2011 + { replace(__p.index(), __r); }
1.2012 + void replace(const iterator& __p, _CharT __c)
1.2013 + { replace(__p.index(), __c); }
1.2014 + void replace(const iterator& __p, const _CharT* __c_string)
1.2015 + { replace(__p.index(), __c_string); }
1.2016 + void replace(const iterator& __p, const _CharT* __i, size_t __n)
1.2017 + { replace(__p.index(), __i, __n); }
1.2018 + void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
1.2019 + { replace(__p.index(), __i, __j); }
1.2020 + void replace(const iterator& __p, const_iterator __i,
1.2021 + const_iterator __j)
1.2022 + { replace(__p.index(), __i, __j); }
1.2023 + void replace(const iterator& __p, iterator __i, iterator __j)
1.2024 + { replace(__p.index(), __i, __j); }
1.2025 +
1.2026 + // Iterator and range variants of erase
1.2027 + iterator erase(const iterator& __p, const iterator& __q) {
1.2028 + size_t __p_index = __p.index();
1.2029 + erase(__p_index, __q.index() - __p_index);
1.2030 + return iterator(this, __p_index);
1.2031 + }
1.2032 + iterator erase(const iterator& __p) {
1.2033 + size_t __p_index = __p.index();
1.2034 + erase(__p_index, 1);
1.2035 + return iterator(this, __p_index);
1.2036 + }
1.2037 +
1.2038 + _Self substr(size_t __start, size_t __len = 1) const {
1.2039 + return rope<_CharT,_Alloc>(
1.2040 + _S_substring(_M_tree_ptr._M_data, __start, __start + __len));
1.2041 + }
1.2042 +
1.2043 + _Self substr(iterator __start, iterator __end) const {
1.2044 + return rope<_CharT,_Alloc>(
1.2045 + _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.2046 + }
1.2047 +
1.2048 + _Self substr(iterator __start) const {
1.2049 + size_t __pos = __start.index();
1.2050 + return rope<_CharT,_Alloc>(
1.2051 + _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.2052 + }
1.2053 +
1.2054 + _Self substr(const_iterator __start, const_iterator __end) const {
1.2055 + // This might eventually take advantage of the cache in the
1.2056 + // iterator.
1.2057 + return rope<_CharT,_Alloc>(
1.2058 + _S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
1.2059 + }
1.2060 +
1.2061 + rope<_CharT,_Alloc> substr(const_iterator __start) {
1.2062 + size_t __pos = __start.index();
1.2063 + return rope<_CharT,_Alloc>(
1.2064 + _S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
1.2065 + }
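+
+  // Example of substr (names are illustrative): substrings share structure
+  // with the source rope, so they are cheap even when the rope is long.
+  //   crope __r("hello world");
+  //   crope __head = __r.substr(0, 5);            // "hello"
+  //   crope __one  = __r.substr(__r.begin() + 6); // single character, "w"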
1.2066 +
1.2067 + enum { npos = -1 };
1.2068 +
1.2069 + // static const size_type npos;
1.2070 +
1.2071 + size_type find(_CharT __c, size_type __pos = 0) const;
1.2072 + size_type find(const _CharT* __s, size_type __pos = 0) const {
1.2073 + size_type __result_pos;
1.2074 + const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
1.2075 + __s, __s + _S_char_ptr_len(__s));
1.2076 + __result_pos = __result.index();
1.2077 +# ifndef _STLP_OLD_ROPE_SEMANTICS
1.2078 + if (__result_pos == size()) __result_pos = npos;
1.2079 +# endif
1.2080 + return __result_pos;
1.2081 + }
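+
+  // Example of find (names are illustrative): the result is an index, with
+  // npos reported when the pattern is absent (size() under the old
+  // _STLP_OLD_ROPE_SEMANTICS behaviour).
+  //   crope __r("hello world");
+  //   crope::size_type __i = __r.find("world");   // 6
+  //   crope::size_type __j = __r.find('z');       // crope::npos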
1.2082 +
1.2083 + iterator mutable_begin() {
1.2084 + return(iterator(this, 0));
1.2085 + }
1.2086 +
1.2087 + iterator mutable_end() {
1.2088 + return(iterator(this, size()));
1.2089 + }
1.2090 +
1.2091 + reverse_iterator mutable_rbegin() {
1.2092 + return reverse_iterator(mutable_end());
1.2093 + }
1.2094 +
1.2095 + reverse_iterator mutable_rend() {
1.2096 + return reverse_iterator(mutable_begin());
1.2097 + }
1.2098 +
1.2099 + reference mutable_reference_at(size_type __pos) {
1.2100 + return reference(this, __pos);
1.2101 + }
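+
+  // Sketch of mutable access (names are illustrative): writes go through
+  // proxy references, so assigning through them logically copies the
+  // affected part of the tree rather than mutating shared leaves.
+  //   crope __r("abc");
+  //   __r.mutable_reference_at(0) = 'A';   // __r is now "Abc"
+  //   *__r.mutable_begin() = 'a';          // back to "abc"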
1.2102 +
1.2103 +# ifdef __STD_STUFF
1.2104 + reference operator[] (size_type __pos) {
1.2105 + return reference(this, __pos);
1.2106 + }
1.2107 +
1.2108 + reference at(size_type __pos) {
1.2109 + // if (__pos >= size()) throw out_of_range; // XXX
1.2110 + return (*this)[__pos];
1.2111 + }
1.2112 +
1.2113 + void resize(size_type, _CharT) {}
1.2114 + void resize(size_type) {}
1.2115 + void reserve(size_type = 0) {}
1.2116 + size_type capacity() const {
1.2117 + return max_size();
1.2118 + }
1.2119 +
1.2120 + // Stuff below this line is dangerous because it's error-prone.
1.2121 + // I would really like to get rid of it.
1.2122 + // Copy function with the standard's (buffer, n, pos) argument order.
1.2123 + size_type copy(_CharT* __buffer, size_type __n,
1.2124 + size_type __pos = 0) const {
1.2125 + return copy(__pos, __n, __buffer);
1.2126 + }
1.2127 +
1.2128 + iterator end() { return mutable_end(); }
1.2129 +
1.2130 + iterator begin() { return mutable_begin(); }
1.2131 +
1.2132 + reverse_iterator rend() { return mutable_rend(); }
1.2133 +
1.2134 + reverse_iterator rbegin() { return mutable_rbegin(); }
1.2135 +
1.2136 +# else
1.2137 +
1.2138 + const_iterator end() { return const_end(); }
1.2139 +
1.2140 + const_iterator begin() { return const_begin(); }
1.2141 +
1.2142 + const_reverse_iterator rend() { return const_rend(); }
1.2143 +
1.2144 + const_reverse_iterator rbegin() { return const_rbegin(); }
1.2145 +
1.2146 +# endif
1.2147 +
1.2148 + __ROPE_DEFINE_ALLOCS(_Alloc, _M_tree_ptr)
1.2149 + };
1.2150 +
1.2151 +# undef __ROPE_DEFINE_ALLOC
1.2152 +# undef __ROPE_DEFINE_ALLOCS
1.2153 +
1.2154 +template <class _CharT, class _Alloc>
1.2155 +inline _CharT
1.2156 +_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
1.2157 +{
1.2158 + return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n);
1.2159 +}
1.2160 +
1.2161 +template <class _CharT, class _Alloc>
1.2162 +inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2163 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2164 + return (__x._M_current_pos == __y._M_current_pos &&
1.2165 + __x._M_root == __y._M_root);
1.2166 +}
1.2167 +
1.2168 +template <class _CharT, class _Alloc>
1.2169 +inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2170 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2171 + return (__x._M_current_pos < __y._M_current_pos);
1.2172 +}
1.2173 +
1.2174 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2175 +
1.2176 +template <class _CharT, class _Alloc>
1.2177 +inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2178 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2179 + return !(__x == __y);
1.2180 +}
1.2181 +
1.2182 +template <class _CharT, class _Alloc>
1.2183 +inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2184 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2185 + return __y < __x;
1.2186 +}
1.2187 +
1.2188 +template <class _CharT, class _Alloc>
1.2189 +inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2190 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2191 + return !(__y < __x);
1.2192 +}
1.2193 +
1.2194 +template <class _CharT, class _Alloc>
1.2195 +inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2196 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2197 + return !(__x < __y);
1.2198 +}
1.2199 +
1.2200 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2201 +
1.2202 +template <class _CharT, class _Alloc>
1.2203 +inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
1.2204 + const _Rope_const_iterator<_CharT,_Alloc>& __y) {
1.2205 + return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
1.2206 +}
1.2207 +
1.2208 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2209 +template <class _CharT, class _Alloc>
1.2210 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2211 +operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
1.2212 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2213 + __x._M_root, __x._M_current_pos - __n);
1.2214 +}
1.2215 +# endif
1.2216 +
1.2217 +template <class _CharT, class _Alloc>
1.2218 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2219 +operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
1.2220 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2221 + __x._M_root, __x._M_current_pos + __n);
1.2222 +}
1.2223 +
1.2224 +template <class _CharT, class _Alloc>
1.2225 +inline _Rope_const_iterator<_CharT,_Alloc>
1.2226 +operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
1.2227 + return _Rope_const_iterator<_CharT,_Alloc>(
1.2228 + __x._M_root, __x._M_current_pos + __n);
1.2229 +}
1.2230 +
1.2231 +template <class _CharT, class _Alloc>
1.2232 +inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2233 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2234 + return (__x._M_current_pos == __y._M_current_pos &&
1.2235 + __x._M_root_rope == __y._M_root_rope);
1.2236 +}
1.2237 +
1.2238 +template <class _CharT, class _Alloc>
1.2239 +inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2240 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2241 + return (__x._M_current_pos < __y._M_current_pos);
1.2242 +}
1.2243 +
1.2244 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2245 +
1.2246 +template <class _CharT, class _Alloc>
1.2247 +inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2248 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2249 + return !(__x == __y);
1.2250 +}
1.2251 +
1.2252 +template <class _CharT, class _Alloc>
1.2253 +inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2254 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2255 + return __y < __x;
1.2256 +}
1.2257 +
1.2258 +template <class _CharT, class _Alloc>
1.2259 +inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2260 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2261 + return !(__y < __x);
1.2262 +}
1.2263 +
1.2264 +template <class _CharT, class _Alloc>
1.2265 +inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
1.2266 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2267 + return !(__x < __y);
1.2268 +}
1.2269 +
1.2270 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2271 +
1.2272 +template <class _CharT, class _Alloc>
1.2273 +inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2274 + const _Rope_iterator<_CharT,_Alloc>& __y) {
1.2275 + return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
1.2276 +}
1.2277 +
1.2278 +#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
1.2279 +template <class _CharT, class _Alloc>
1.2280 +inline _Rope_iterator<_CharT,_Alloc>
1.2281 +operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2282 + ptrdiff_t __n) {
1.2283 + return _Rope_iterator<_CharT,_Alloc>(
1.2284 + __x._M_root_rope, __x._M_current_pos - __n);
1.2285 +}
1.2286 +# endif
1.2287 +
1.2288 +template <class _CharT, class _Alloc>
1.2289 +inline _Rope_iterator<_CharT,_Alloc>
1.2290 +operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
1.2291 + ptrdiff_t __n) {
1.2292 + return _Rope_iterator<_CharT,_Alloc>(
1.2293 + __x._M_root_rope, __x._M_current_pos + __n);
1.2294 +}
1.2295 +
1.2296 +template <class _CharT, class _Alloc>
1.2297 +inline _Rope_iterator<_CharT,_Alloc>
1.2298 +operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
1.2299 + return _Rope_iterator<_CharT,_Alloc>(
1.2300 + __x._M_root_rope, __x._M_current_pos + __n);
1.2301 +}
1.2302 +
1.2303 +template <class _CharT, class _Alloc>
1.2304 +inline
1.2305 +rope<_CharT,_Alloc>
1.2306 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2307 + const rope<_CharT,_Alloc>& __right)
1.2308 +{
1.2309 + _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
1.2310 + return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
1.2311 + // Inlining this should make it possible to keep __left and
1.2312 + // __right in registers.
1.2313 +}
1.2314 +
1.2315 +template <class _CharT, class _Alloc>
1.2316 +inline
1.2317 +rope<_CharT,_Alloc>&
1.2318 +operator+= (rope<_CharT,_Alloc>& __left,
1.2319 + const rope<_CharT,_Alloc>& __right)
1.2320 +{
1.2321 + __left.append(__right);
1.2322 + return __left;
1.2323 +}
1.2324 +
1.2325 +template <class _CharT, class _Alloc>
1.2326 +inline
1.2327 +rope<_CharT,_Alloc>
1.2328 +operator+ (const rope<_CharT,_Alloc>& __left,
1.2329 + const _CharT* __right) {
1.2330 + size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
1.2331 + return rope<_CharT,_Alloc>(
1.2332 + rope<_CharT,_Alloc>::_S_concat_char_iter(
1.2333 + __left._M_tree_ptr._M_data, __right, __rlen));
1.2334 +}
1.2335 +
1.2336 +template <class _CharT, class _Alloc>
1.2337 +inline
1.2338 +rope<_CharT,_Alloc>&
1.2339 +operator+= (rope<_CharT,_Alloc>& __left,
1.2340 + const _CharT* __right) {
1.2341 + __left.append(__right);
1.2342 + return __left;
1.2343 +}
1.2344 +
1.2345 +template <class _CharT, class _Alloc>
1.2346 +inline
1.2347 +rope<_CharT,_Alloc>
1.2348 +operator+ (const rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
1.2349 + return rope<_CharT,_Alloc>(
1.2350 + rope<_CharT,_Alloc>::_S_concat_char_iter(
1.2351 + __left._M_tree_ptr._M_data, &__right, 1));
1.2352 +}
1.2353 +
1.2354 +template <class _CharT, class _Alloc>
1.2355 +inline
1.2356 +rope<_CharT,_Alloc>&
1.2357 +operator+= (rope<_CharT,_Alloc>& __left, _STLP_SIMPLE_TYPE(_CharT) __right) {
1.2358 + __left.append(__right);
1.2359 + return __left;
1.2360 +}
1.2361 +
1.2362 +template <class _CharT, class _Alloc>
1.2363 +inline bool
1.2364 +operator< (const rope<_CharT,_Alloc>& __left,
1.2365 + const rope<_CharT,_Alloc>& __right) {
1.2366 + return __left.compare(__right) < 0;
1.2367 +}
1.2368 +
1.2369 +template <class _CharT, class _Alloc>
1.2370 +inline bool
1.2371 +operator== (const rope<_CharT,_Alloc>& __left,
1.2372 + const rope<_CharT,_Alloc>& __right) {
1.2373 + return __left.compare(__right) == 0;
1.2374 +}
1.2375 +
1.2376 +#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
1.2377 +
1.2378 +template <class _CharT, class _Alloc>
1.2379 +inline bool
1.2380 +operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2381 + return !(__x == __y);
1.2382 +}
1.2383 +
1.2384 +template <class _CharT, class _Alloc>
1.2385 +inline bool
1.2386 +operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2387 + return __y < __x;
1.2388 +}
1.2389 +
1.2390 +template <class _CharT, class _Alloc>
1.2391 +inline bool
1.2392 +operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2393 + return !(__y < __x);
1.2394 +}
1.2395 +
1.2396 +template <class _CharT, class _Alloc>
1.2397 +inline bool
1.2398 +operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
1.2399 + return !(__x < __y);
1.2400 +}
1.2401 +
1.2402 +template <class _CharT, class _Alloc>
1.2403 +inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2404 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2405 + return !(__x == __y);
1.2406 +}
1.2407 +
1.2408 +#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
1.2409 +
1.2410 +template <class _CharT, class _Alloc>
1.2411 +inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
1.2412 + const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
1.2413 + return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
1.2414 +}
1.2415 +
1.2416 +#ifdef _STLP_USE_NEW_IOSTREAMS
1.2417 +template<class _CharT, class _Traits, class _Alloc>
1.2418 +basic_ostream<_CharT, _Traits>& operator<< (
1.2419 + basic_ostream<_CharT, _Traits>& __o,
1.2420 + const rope<_CharT, _Alloc>& __r);
1.2421 +#elif ! defined (_STLP_USE_NO_IOSTREAMS)
1.2422 +template<class _CharT, class _Alloc>
1.2423 +ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
1.2424 +#endif
1.2425 +
1.2426 +typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
1.2427 +# ifdef _STLP_HAS_WCHAR_T
1.2428 +typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
1.2429 +# endif
1.2430 +
1.2431 +inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
1.2432 +{
1.2433 + return __c.mutable_reference_at(__i);
1.2434 +}
1.2435 +
1.2436 +# ifdef _STLP_HAS_WCHAR_T
1.2437 +inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
1.2438 +{
1.2439 + return __c.mutable_reference_at(__i);
1.2440 +}
1.2441 +# endif
1.2442 +
1.2443 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
1.2444 +
1.2445 +template <class _CharT, class _Alloc>
1.2446 +inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
1.2447 + __x.swap(__y);
1.2448 +}
1.2449 +#else
1.2450 +
1.2451 +inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
1.2452 +# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
1.2453 +inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
1.2454 +# endif
1.2455 +
1.2456 +#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
1.2457 +
1.2458 +
1.2459 +// Hash functions should probably be revisited later:
1.2460 +_STLP_TEMPLATE_NULL struct hash<crope>
1.2461 +{
1.2462 + size_t operator()(const crope& __str) const
1.2463 + {
1.2464 + size_t _p_size = __str.size();
1.2465 +
1.2466 + if (0 == _p_size) return 0;
1.2467 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2468 + }
1.2469 +};
1.2470 +
1.2471 +# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
1.2472 +_STLP_TEMPLATE_NULL struct hash<wrope>
1.2473 +{
1.2474 + size_t operator()(const wrope& __str) const
1.2475 + {
1.2476 + size_t _p_size = __str.size();
1.2477 +
1.2478 + if (0 == _p_size) return 0;
1.2479 + return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
1.2480 + }
1.2481 +};
1.2482 +#endif
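+
+// Note on the hash specializations above (the value shown is illustrative):
+// only the first character, the last character and the length are mixed,
+// e.g. hash<crope>()(crope("abc")) == 13*'a' + 5*'c' + 3, so distinct ropes
+// agreeing on those three attributes collide.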
1.2483 +
1.2484 +#ifndef _STLP_MSVC
1.2485 +// I couldn't get this to work with VC++
1.2486 +template<class _CharT,class _Alloc>
1.2487 +void
1.2488 +_Rope_rotate(_Rope_iterator<_CharT,_Alloc> __first,
1.2489 + _Rope_iterator<_CharT,_Alloc> __middle,
1.2490 + _Rope_iterator<_CharT,_Alloc> __last);
1.2491 +
1.2492 +#if !defined(__GNUC__)
1.2493 +// Appears to confuse g++
1.2494 +inline void rotate(_Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __first,
1.2495 + _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __middle,
1.2496 + _Rope_iterator<char,_STLP_DEFAULT_ALLOCATOR(char) > __last) {
1.2497 + _Rope_rotate(__first, __middle, __last);
1.2498 +}
1.2499 +#endif
1.2500 +
1.2501 +#endif
1.2502 +
1.2503 +template <class _CharT, class _Alloc>
1.2504 +inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const
1.2505 +{
1.2506 + if (_M_current_valid) {
1.2507 + return _M_current;
1.2508 + } else {
1.2509 + return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
1.2510 + }
1.2511 +}
1.2512 +_STLP_END_NAMESPACE
1.2513 +
1.2514 +# if !defined (_STLP_LINK_TIME_INSTANTIATION)
1.2515 +# include <stl/_rope.c>
1.2516 +# endif
1.2517 +
1.2518 +# endif /* _STLP_INTERNAL_ROPE_H */
1.2519 +
1.2520 +// Local Variables:
1.2521 +// mode:C++
1.2522 +// End: