
stl_alloc.h

/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

#ifndef __SGI_STL_INTERNAL_ALLOC_H
#define __SGI_STL_INTERNAL_ALLOC_H

// This implements some standard node allocators.  These are
// NOT the same as the allocators in the C++ draft standard or in
// the original STL.  They do not encapsulate different pointer
// types; indeed we assume that there is only one pointer type.
// The allocation primitives are intended to allocate individual objects,
// not larger arenas as with the original STL allocators.

#include <bits/functexcept.h>   // for __throw_bad_alloc
#include <bits/std_cstddef.h>
#include <bits/std_cstdlib.h>
#include <bits/std_cstring.h>
#include <bits/std_cassert.h>
#ifndef __RESTRICT
#  define __RESTRICT
#endif

#ifdef __STL_THREADS
# include <bits/stl_threads.h>
# define __NODE_ALLOCATOR_THREADS true
# ifdef __STL_SGI_THREADS
  // We test whether threads are in use before locking.
  // Perhaps this should be moved into stl_threads.h, but that
  // probably makes it harder to avoid the procedure call when
  // it isn't needed.
    extern "C" {
      extern int __us_rsthread_malloc;
    }
    // The above is copied from malloc.h.  Including <malloc.h>
    // would be cleaner but fails with certain levels of standard
    // conformance.
#   define __NODE_ALLOCATOR_LOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_release_lock(); }
# else /* !__STL_SGI_THREADS */
#   define __NODE_ALLOCATOR_LOCK \
        { if (threads) _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK \
        { if (threads) _S_node_allocator_lock._M_release_lock(); }
# endif
#else
//  Thread-unsafe
#   define __NODE_ALLOCATOR_LOCK
#   define __NODE_ALLOCATOR_UNLOCK
#   define __NODE_ALLOCATOR_THREADS false
#endif

namespace std
{

// Malloc-based allocator.  Typically slower than default alloc below.
// Typically thread-safe and more storage efficient.
template <int __inst>
class __malloc_alloc_template {

private:

  static void* _S_oom_malloc(size_t);
  static void* _S_oom_realloc(void*, size_t);
  static void (* __malloc_alloc_oom_handler)();

public:

  static void* allocate(size_t __n)
  {
    void* __result = malloc(__n);
    if (0 == __result) __result = _S_oom_malloc(__n);
    return __result;
  }

  static void deallocate(void* __p, size_t /* __n */)
  {
    free(__p);
  }

  static void* reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
  {
    void* __result = realloc(__p, __new_sz);
    if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
    return __result;
  }

  static void (* __set_malloc_handler(void (*__f)()))()
  {
    void (* __old)() = __malloc_alloc_oom_handler;
    __malloc_alloc_oom_handler = __f;
    return(__old);
  }

};

// malloc_alloc out-of-memory handling

template <int __inst>
void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;

template <int __inst>
void*
__malloc_alloc_template<__inst>::_S_oom_malloc(size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { std::__throw_bad_alloc(); }
        (*__my_malloc_handler)();
        __result = malloc(__n);
        if (__result) return(__result);
    }
}

template <int __inst>
void* __malloc_alloc_template<__inst>::_S_oom_realloc(void* __p, size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { std::__throw_bad_alloc(); }
        (*__my_malloc_handler)();
        __result = realloc(__p, __n);
        if (__result) return(__result);
    }
}

typedef __malloc_alloc_template<0> malloc_alloc;

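// Illustrative usage sketch (editor's addition, not part of the original
// header): malloc_alloc forwards to malloc/free, and __set_malloc_handler
// installs an out-of-memory callback in the style of set_new_handler.
// The handler name below is hypothetical.
//
//   void __free_some_memory();          // hypothetical: release a cache
//
//   void (*__old)() =
//       malloc_alloc::__set_malloc_handler(__free_some_memory);
//   void* __p = malloc_alloc::allocate(64);  // retries via the handler on
//                                            // failure, else throws bad_alloc
//   malloc_alloc::deallocate(__p, 64);       // size argument is ignored
//   malloc_alloc::__set_malloc_handler(__old);
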
template<class _Tp, class _Alloc>
class simple_alloc {

public:
    static _Tp* allocate(size_t __n)
      { return 0 == __n ? 0 : (_Tp*) _Alloc::allocate(__n * sizeof (_Tp)); }
    static _Tp* allocate(void)
      { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
    static void deallocate(_Tp* __p, size_t __n)
      { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
    static void deallocate(_Tp* __p)
      { _Alloc::deallocate(__p, sizeof (_Tp)); }
};

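// Illustrative usage sketch (editor's addition): simple_alloc adapts a raw
// byte allocator to a typed interface, converting object counts to byte
// counts.  This is the form in which the SGI containers consume their
// allocator internally.
//
//   typedef simple_alloc<int, malloc_alloc> _Int_alloc;
//   int* __buf = _Int_alloc::allocate(10);   // 10 * sizeof(int) bytes
//   _Int_alloc::deallocate(__buf, 10);       // count must match allocate
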
// Allocator adaptor to check size arguments for debugging.
// Reports errors using assert.  Checking can be disabled with
// NDEBUG, but it's far better to just use the underlying allocator
// instead when no checking is desired.
// There is some evidence that this can confuse Purify.
template <class _Alloc>
class debug_alloc {

private:

  enum {_S_extra = 8};  // Size of space used to store size.  Note
                        // that this must be large enough to preserve
                        // alignment.

public:

  static void* allocate(size_t __n)
  {
    char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
    *(size_t*)__result = __n;
    return __result + (int) _S_extra;
  }

  static void deallocate(void* __p, size_t __n)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __n);
    _Alloc::deallocate(__real_p, __n + (int) _S_extra);
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __old_sz);
    char* __result = (char*)
      _Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
                                   __new_sz + (int) _S_extra);
    *(size_t*)__result = __new_sz;
    return __result + (int) _S_extra;
  }

};

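// Illustrative usage sketch (editor's addition): debug_alloc stashes the
// requested size in an 8-byte header in front of the returned block, then
// asserts on deallocation that the caller passed the same size back.
//
//   typedef debug_alloc<malloc_alloc> _Checked_alloc;
//   void* __p = _Checked_alloc::allocate(100); // really allocates 108 bytes
//   _Checked_alloc::deallocate(__p, 100);      // OK: sizes match
//   // _Checked_alloc::deallocate(__p, 64);    // would trip the assert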

# ifdef __USE_MALLOC

typedef malloc_alloc alloc;
typedef malloc_alloc single_client_alloc;

# else


// Default node allocator.
// With a reasonable compiler, this should be roughly as fast as the
// original STL class-specific allocators, but with less fragmentation.
// Default_alloc_template parameters are experimental and MAY
// DISAPPEAR in the future.  Clients should just use alloc for now.
//
// Important implementation properties:
// 1. If the client requests an object of size > _MAX_BYTES, the resulting
//    object will be obtained directly from malloc.
// 2. In all other cases, we allocate an object of size exactly
//    _S_round_up(requested_size).  Thus the client has enough size
//    information that we can return the object to the proper free list
//    without permanently losing part of the object.
//

// The first template parameter specifies whether more than one thread
// may use this allocator.  It is safe to allocate an object from
// one instance of a default_alloc and deallocate it with another
// one.  This effectively transfers its ownership to the second one.
// This may have undesirable effects on reference locality.
// The second parameter is unreferenced and serves only to allow the
// creation of multiple default_alloc instances.
// Note that containers built on different allocator instances have
// different types, limiting the utility of this approach.

template <bool threads, int inst>
class __default_alloc_template {

private:
  // Really we should use static const int x = N
  // instead of enum { x = N }, but few compilers accept the former.
  enum {_ALIGN = 8};
  enum {_MAX_BYTES = 128};
  enum {_NFREELISTS = 16}; // _MAX_BYTES/_ALIGN
  static size_t
  _S_round_up(size_t __bytes)
    { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }

  union _Obj {
        union _Obj* _M_free_list_link;
        char _M_client_data[1];    /* The client sees this.        */
  };

  static _Obj* __STL_VOLATILE _S_free_list[];
        // Specifying a size here results in a duplicate definition
        // with SunPro 4.1.
  static size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (size_t)_ALIGN-1)/(size_t)_ALIGN - 1);
  }

  // Returns an object of size __n, and optionally adds objects to the
  // size-__n free list.
  static void* _S_refill(size_t __n);
  // Allocates a chunk for __nobjs objects of size __size.  __nobjs may
  // be reduced if it is inconvenient to allocate the requested number.
  static char* _S_chunk_alloc(size_t __size, int& __nobjs);

  // Chunk allocation state.
  static char* _S_start_free;
  static char* _S_end_free;
  static size_t _S_heap_size;

# ifdef __STL_THREADS
    static _STL_mutex_lock _S_node_allocator_lock;
# endif

    // It would be nice to use _STL_auto_lock here.  But we
    // don't need the NULL check.  And we do need a test whether
    // threads have actually been started.
    class _Lock;
    friend class _Lock;
    class _Lock {
        public:
            _Lock() { __NODE_ALLOCATOR_LOCK; }
            ~_Lock() { __NODE_ALLOCATOR_UNLOCK; }
    };

public:

  /* __n must be > 0 */
  static void* allocate(size_t __n)
  {
    void* __ret = 0;

    if (__n > (size_t) _MAX_BYTES) {
      __ret = malloc_alloc::allocate(__n);
    }
    else {
      _Obj* __STL_VOLATILE* __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      // Acquire the lock here with a constructor call.
      // This ensures that it is released on exit or during stack
      // unwinding.
#     ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#     endif
      _Obj* __RESTRICT __result = *__my_free_list;
      if (__result == 0)
        __ret = _S_refill(_S_round_up(__n));
      else {
        *__my_free_list = __result -> _M_free_list_link;
        __ret = __result;
      }
    }

    return __ret;
  }

  /* __p may not be 0 */
  static void deallocate(void* __p, size_t __n)
  {
    if (__n > (size_t) _MAX_BYTES)
      malloc_alloc::deallocate(__p, __n);
    else {
      _Obj* __STL_VOLATILE* __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      _Obj* __q = (_Obj*)__p;

      // acquire lock
#       ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#       endif /* _NOTHREADS */
      __q -> _M_free_list_link = *__my_free_list;
      *__my_free_list = __q;
      // lock is released here
    }
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz);

};

typedef __default_alloc_template<__NODE_ALLOCATOR_THREADS, 0> alloc;
typedef __default_alloc_template<false, 0> single_client_alloc;

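// Illustrative usage sketch (editor's addition): small requests are rounded
// up to a multiple of _ALIGN (8) and served from one of the 16 free lists;
// requests above _MAX_BYTES (128) fall through to malloc_alloc.  For
// example, a 13-byte request is rounded up to 16 bytes and uses free list
// index (13 + 8 - 1)/8 - 1 == 1.
//
//   void* __p = alloc::allocate(13);   // carved from the 16-byte free list
//   alloc::deallocate(__p, 13);        // same size returns it to that list
//   void* __q = alloc::allocate(256);  // > 128 bytes: plain malloc
//   alloc::deallocate(__q, 256);
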
template <bool __threads, int __inst>
inline bool operator==(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return true;
}

template <bool __threads, int __inst>
inline bool operator!=(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return false;
}


/* We allocate memory in large chunks in order to avoid fragmenting     */
/* the malloc heap too much.                                            */
/* We assume that size is properly aligned.                             */
/* We hold the allocation lock.                                         */
template <bool __threads, int __inst>
char*
__default_alloc_template<__threads, __inst>::_S_chunk_alloc(size_t __size,
                                                            int& __nobjs)
{
    char* __result;
    size_t __total_bytes = __size * __nobjs;
    size_t __bytes_left = _S_end_free - _S_start_free;

    if (__bytes_left >= __total_bytes) {
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else if (__bytes_left >= __size) {
        __nobjs = (int)(__bytes_left/__size);
        __total_bytes = __size * __nobjs;
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else {
        size_t __bytes_to_get =
            2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
        // Try to make use of the left-over piece.
        if (__bytes_left > 0) {
            _Obj* __STL_VOLATILE* __my_free_list =
                        _S_free_list + _S_freelist_index(__bytes_left);

            ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
            *__my_free_list = (_Obj*)_S_start_free;
        }
        _S_start_free = (char*)malloc(__bytes_to_get);
        if (0 == _S_start_free) {
            size_t __i;
            _Obj* __STL_VOLATILE* __my_free_list;
            _Obj* __p;
            // Try to make do with what we have.  That can't
            // hurt.  We do not try smaller requests, since that tends
            // to result in disaster on multi-process machines.
            for (__i = __size;
                 __i <= (size_t) _MAX_BYTES;
                 __i += (size_t) _ALIGN) {
                __my_free_list = _S_free_list + _S_freelist_index(__i);
                __p = *__my_free_list;
                if (0 != __p) {
                    *__my_free_list = __p -> _M_free_list_link;
                    _S_start_free = (char*)__p;
                    _S_end_free = _S_start_free + __i;
                    return(_S_chunk_alloc(__size, __nobjs));
                    // Any leftover piece will eventually make it to the
                    // right free list.
                }
            }
            _S_end_free = 0;    // In case of exception.
            _S_start_free = (char*)malloc_alloc::allocate(__bytes_to_get);
            // This should either throw an
            // exception or remedy the situation.  Thus we assume it
            // succeeded.
        }
        _S_heap_size += __bytes_to_get;
        _S_end_free = _S_start_free + __bytes_to_get;
        return(_S_chunk_alloc(__size, __nobjs));
    }
}

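// Worked example (editor's addition) of the growth heuristic above: a fresh
// refill of 20 objects of size 16 requests 2 * 320 bytes plus
// _S_round_up(_S_heap_size >> 4), so the first such call (heap size 0)
// mallocs 640 bytes; as _S_heap_size grows, each new chunk grows with it.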

/* Returns an object of size __n, and optionally adds objects to the    */
/* size-__n free list.  We assume that __n is properly aligned.         */
/* We hold the allocation lock.                                         */
template <bool __threads, int __inst>
void*
__default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
{
    int __nobjs = 20;
    char* __chunk = _S_chunk_alloc(__n, __nobjs);
    _Obj* __STL_VOLATILE* __my_free_list;
    _Obj* __result;
    _Obj* __current_obj;
    _Obj* __next_obj;
    int __i;

    if (1 == __nobjs) return(__chunk);
    __my_free_list = _S_free_list + _S_freelist_index(__n);

    /* Build free list in chunk */
    __result = (_Obj*)__chunk;
    *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
    for (__i = 1; ; __i++) {
        __current_obj = __next_obj;
        __next_obj = (_Obj*)((char*)__next_obj + __n);
        if (__nobjs - 1 == __i) {
            __current_obj -> _M_free_list_link = 0;
            break;
        } else {
            __current_obj -> _M_free_list_link = __next_obj;
        }
    }
    return(__result);
}

template <bool threads, int inst>
void*
__default_alloc_template<threads, inst>::reallocate(void* __p,
                                                    size_t __old_sz,
                                                    size_t __new_sz)
{
    void* __result;
    size_t __copy_sz;

    if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES) {
        return(realloc(__p, __new_sz));
    }
    if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
    __result = allocate(__new_sz);
    __copy_sz = __new_sz > __old_sz ? __old_sz : __new_sz;
    memcpy(__result, __p, __copy_sz);
    deallocate(__p, __old_sz);
    return(__result);
}

#ifdef __STL_THREADS
    template <bool __threads, int __inst>
    _STL_mutex_lock
    __default_alloc_template<__threads, __inst>::_S_node_allocator_lock
        __STL_MUTEX_INITIALIZER;
#endif


template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __default_alloc_template<__threads, __inst>::_S_heap_size = 0;

template <bool __threads, int __inst>
typename __default_alloc_template<__threads, __inst>::_Obj* __STL_VOLATILE
__default_alloc_template<__threads, __inst>::_S_free_list[
    __default_alloc_template<__threads, __inst>::_NFREELISTS
] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy.  Otherwise it appears to allocate too little
// space for the array.

#endif /* ! __USE_MALLOC */

// This implements allocators as specified in the C++ standard.
//
// Note that standard-conforming allocators use many language features
// that are not yet widely implemented.  In particular, they rely on
// member templates, partial specialization, partial ordering of function
// templates, the typename keyword, and the use of the template keyword
// to refer to a template member of a dependent type.

template <class _Tp>
class allocator {
  typedef alloc _Alloc;          // The underlying allocator.
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };

  allocator() __STL_NOTHROW {}
  allocator(const allocator&) __STL_NOTHROW {}
  template <class _Tp1> allocator(const allocator<_Tp1>&) __STL_NOTHROW {}
  ~allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)))
                    : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};

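// Illustrative usage sketch (editor's addition): allocator<_Tp> is the
// standard-conforming interface layered over alloc.  Allocation and
// construction are separate steps, as the standard requires.
//
//   allocator<double> __a;
//   double* __p = __a.allocate(4);      // raw storage for 4 doubles
//   __a.construct(__p, 3.14);           // placement-new the first element
//   __a.destroy(__p);                   // run the destructor
//   __a.deallocate(__p, 4);             // count must match allocate
//
//   // rebind obtains an allocator for another type, e.g. list nodes:
//   allocator<double>::rebind<int>::other __ai;
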
template<>
class allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };
};


template <class _T1, class _T2>
inline bool operator==(const allocator<_T1>&, const allocator<_T2>&)
{
  return true;
}

template <class _T1, class _T2>
inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
{
  return false;
}

// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator.  Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions.  Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.

template <class _Tp, class _Alloc>
struct __allocator {
  _Alloc __underlying_alloc;

  typedef size_t    size_type;
  typedef ptrdiff_t difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };

  __allocator() __STL_NOTHROW {}
  __allocator(const __allocator& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  ~__allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
        ? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
        : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};

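// Illustrative usage sketch (editor's addition): __allocator wraps any
// SGI-style allocator so it can be used wherever a standard-conforming
// allocator is expected, e.g. as a container's allocator parameter.
//
//   typedef __allocator<int, malloc_alloc> _Std_malloc_alloc;
//   _Std_malloc_alloc __a;
//   int* __p = __a.allocate(8);       // 8 * sizeof(int) via malloc_alloc
//   __a.deallocate(__p, 8);
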
template <class _Alloc>
class __allocator<void, _Alloc> {
public:   // Like allocator<void>, the members must be accessible.
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
};

template <class _Tp, class _Alloc>
inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc == __a2.__underlying_alloc;
}

template <class _Tp, class _Alloc>
inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc != __a2.__underlying_alloc;
}

// Comparison operators for all of the predefined SGI-style allocators.
// This ensures that __allocator<_Tp, malloc_alloc> (for example) will
// work correctly.

template <int __inst>
inline bool operator==(const __malloc_alloc_template<__inst>&,
                       const __malloc_alloc_template<__inst>&)
{
  return true;
}

template <int __inst>
inline bool operator!=(const __malloc_alloc_template<__inst>&,
                       const __malloc_alloc_template<__inst>&)
{
  return false;
}

template <class _Alloc>
inline bool operator==(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return true;
}

template <class _Alloc>
inline bool operator!=(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return false;
}

// Another allocator adaptor: _Alloc_traits.  This serves two
// purposes.  First, make it possible to write containers that can use
// either SGI-style allocators or standard-conforming allocators.
// Second, provide a mechanism so that containers can query whether or
// not the allocator has distinct instances.  If not, the container
// can avoid wasting a word of memory to store an empty object.

// This adaptor uses partial specialization.  The general case of
// _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
// standard-conforming allocator, possibly with non-equal instances
// and non-static members.  (It still behaves correctly even if _Alloc
// has static members and if all instances are equal.  Refinements
// affect performance, not correctness.)

// There are always two members: allocator_type, which is a standard-
// conforming allocator type for allocating objects of type _Tp, and
// _S_instanceless, a static const member of type bool.  If
// _S_instanceless is true, this means that there is no difference
// between any two instances of type allocator_type.  Furthermore, if
// _S_instanceless is true, then _Alloc_traits has one additional
// member: _Alloc_type.  This type encapsulates allocation and
// deallocation of objects of type _Tp through a static interface; it
// has two member functions, whose signatures are
//    static _Tp* allocate(size_t)
//    static void deallocate(_Tp*, size_t)

// The fully general version.

template <class _Tp, class _Allocator>
struct _Alloc_traits
{
  static const bool _S_instanceless = false;
  typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
};

template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;

// The version for the default allocator.

template <class _Tp, class _Tp1>
struct _Alloc_traits<_Tp, allocator<_Tp1> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, alloc> _Alloc_type;
  typedef allocator<_Tp> allocator_type;
};

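// Illustrative usage sketch (editor's addition): a container can dispatch
// on _S_instanceless at compile time.  With the default allocator it is
// true, so the container may use the static _Alloc_type interface and
// store no allocator object at all.
//
//   typedef _Alloc_traits<int, allocator<int> > _Traits;
//   // _Traits::_S_instanceless == true
//   int* __p = _Traits::_Alloc_type::allocate(10);  // static interface
//   _Traits::_Alloc_type::deallocate(__p, 10);
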
// Versions for the predefined SGI-style allocators.

template <class _Tp, int __inst>
struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};

#ifndef __USE_MALLOC
template <class _Tp, bool __threads, int __inst>
struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
          allocator_type;
};
#endif

template <class _Tp, class _Alloc>
struct _Alloc_traits<_Tp, debug_alloc<_Alloc> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};

// Versions for the __allocator adaptor used with the predefined
// SGI-style allocators.

template <class _Tp, class _Tp1, int __inst>
struct _Alloc_traits<_Tp,
                     __allocator<_Tp1, __malloc_alloc_template<__inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};

#ifndef __USE_MALLOC
template <class _Tp, class _Tp1, bool __thr, int __inst>
struct _Alloc_traits<_Tp,
                      __allocator<_Tp1,
                                  __default_alloc_template<__thr, __inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
          allocator_type;
};
#endif

template <class _Tp, class _Tp1, class _Alloc>
struct _Alloc_traits<_Tp, __allocator<_Tp1, debug_alloc<_Alloc> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};

} // namespace std

#endif /* __SGI_STL_INTERNAL_ALLOC_H */

// Local Variables:
// mode:C++
// End:
