/***********************************************************************************
  test_algo.cpp

 * Copyright (c) 1997
 * Mark of the Unicorn, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation. Mark of the Unicorn makes no
 * representations about the suitability of this software for any
 * purpose. It is provided "as is" without express or implied warranty.

***********************************************************************************/
#include "Tests.h"
#include "LeakCheck.h"
#include "SortClass.h"

#if defined (EH_NEW_HEADERS)
#  include <algorithm>
#  include <cassert>
#else
#  include <algo.h>
#  include <assert.h>
#endif

#if defined (EH_NEW_IOSTREAMS)
#  include <iostream>
#else
#  include <iostream.h>
#endif

//
// SortBuffer -- a buffer of SortClass objects that can be used to test sorting.
//
struct SortBuffer
{
    enum { kBufferSize = 100 };

    SortClass* begin()             { return items; }
    const SortClass* begin() const { return items; }
    SortClass* end()               { return items + kBufferSize; }
    const SortClass* end() const   { return items + kBufferSize; }

    // Sort each half of the buffer and reset the address of each element
    // so that merge() can be tested for stability.
    void PrepareMerge()
    {
        EH_STD::sort( begin(), begin() + ( end() - begin() )/2 );
        EH_STD::sort( begin() + ( end() - begin() )/2, end() );
        for ( SortClass* p = begin(); p != end(); p++ )
            p->ResetAddress();
    }

    SortBuffer()
    {
        PrepareMerge();
    }

    SortBuffer( const SortBuffer& rhs )
    {
        SortClass* q = begin();
        for ( const SortClass* p = rhs.begin(); p != rhs.end(); p++, q++ )
            *q = *p;
    }

private:
    SortClass items[kBufferSize];
};

//
// less_by_reference -- a functor for comparing objects against a
// constant value.
//
template <class T>
struct less_by_reference
{
    less_by_reference( const T& arg ) : fArg(arg) {}
    bool operator()( const T& x ) const { return x < fArg; }
private:
    const T& fArg;
};

struct test_stable_partition
{
    test_stable_partition( const SortBuffer& buf )
        : orig( buf ), partitionPoint( SortClass::kRange / 2 ) {
        gTestController.SetCurrentTestName("stable_partition()");
    }

    void operator()( SortBuffer& buf ) const
    {
        less_by_reference<SortClass> throw_cmp( partitionPoint );

        SortClass* d = EH_STD::stable_partition( buf.begin(), buf.end(), throw_cmp );

        // If we get here, no exception occurred during the operation.
        // Stop any potential failures that might occur during verification.
        gTestController.CancelFailureCountdown();

        // Prepare an array of counts of the occurrence of each value in
        // the legal range.
        unsigned counts[SortClass::kRange];
        EH_STD::fill_n( counts, (int)SortClass::kRange, 0 );
        for ( const SortClass* q = orig.begin(); q != orig.end(); q++ )
            counts[ q->value() ]++;

        less_by_reference<SortClass> cmp( partitionPoint );
        for ( const SortClass* p = buf.begin(); p != buf.end(); p++ )
        {
            // Check that adjacent items with the same value haven't been
            // reordered. This could be a more thorough test.
            if ( p != buf.begin() && p->value() == p[-1].value() )
                EH_ASSERT( p->GetAddress() > p[-1].GetAddress() );

            // Check that the partitioning worked.
            EH_ASSERT( (p < d) == cmp( *p ) );

            // Decrement the appropriate count for each value.
            counts[ p->value() ]--;
        }

        // Check that the values were only rearranged, and none were lost.
        for ( unsigned j = 0; j < SortClass::kRange; j++ )
            EH_ASSERT( counts[j] == 0 );
    }

private:
    const SortBuffer& orig;
    SortClass partitionPoint;
};

void assert_sorted_version( const SortBuffer& orig, const SortBuffer& buf );

/*===================================================================================
    assert_sorted_version

    EFFECTS: Asserts that buf is a stable-sorted version of orig.
====================================================================================*/
void assert_sorted_version( const SortBuffer& orig, const SortBuffer& buf )
{
    // Stop any potential failures that might occur during verification.
    gTestController.CancelFailureCountdown();

    // Prepare an array of counts of the occurrence of each value in
    // the legal range.
    unsigned counts[SortClass::kRange];
    EH_STD::fill_n( counts, (int)SortClass::kRange, 0 );
    for ( const SortClass* q = orig.begin(); q != orig.end(); q++ )
        counts[ q->value() ]++;

    // Check that each element is greater than the previous one, or, if they are
    // equal, that their order has been preserved.
    for ( const SortClass* p = buf.begin(); p != buf.end(); p++ )
    {
        if ( p != buf.begin() )
            EH_ASSERT( p->value() > p[-1].value()
                       || ( p->value() == p[-1].value() && p->GetAddress() > p[-1].GetAddress() ) );
        // Decrement the appropriate count for each value.
        counts[ p->value() ]--;
    }

    // Check that the values were only rearranged, and none were lost.
    for ( unsigned j = 0; j < SortClass::kRange; j++ )
        EH_ASSERT( counts[j] == 0 );
}

//
// The test operators
//
struct test_stable_sort_1
{
    test_stable_sort_1( const SortBuffer& buf )
        : orig( buf ) {
        gTestController.SetCurrentTestName("stable_sort() #1");
    }

    void operator()( SortBuffer& buf ) const
    {
        EH_STD::stable_sort( buf.begin(), buf.end() );
        assert_sorted_version( orig, buf );
    }

private:
    const SortBuffer& orig;
};

struct test_stable_sort_2
{
    test_stable_sort_2( const SortBuffer& buf )
        : orig( buf ) {
        gTestController.SetCurrentTestName("stable_sort() #2");
    }

    void operator()( SortBuffer& buf ) const
    {
        EH_STD::stable_sort( buf.begin(), buf.end(), EH_STD::less<SortClass>() );
        assert_sorted_version( orig, buf );
    }

private:
    const SortBuffer& orig;
};

struct test_inplace_merge_1
{
    test_inplace_merge_1( SortBuffer& buf )
        : orig( buf ) {
        gTestController.SetCurrentTestName("inplace_merge() #1");
    }

    void operator()( SortBuffer& buf ) const
    {
        EH_STD::inplace_merge( buf.begin(), buf.begin() + ( buf.end() - buf.begin() )/2, buf.end() );
        assert_sorted_version( orig, buf );
    }

private:
    const SortBuffer& orig;
};

struct test_inplace_merge_2
{
    test_inplace_merge_2( SortBuffer& buf )
        : orig( buf ) {
        gTestController.SetCurrentTestName("inplace_merge() #2");
    }

    void operator()( SortBuffer& buf ) const
    {
        EH_STD::inplace_merge( buf.begin(), buf.begin() + ( buf.end() - buf.begin() )/2, buf.end(),
                               EH_STD::less<SortClass>() );
        assert_sorted_version( orig, buf );
    }

private:
    const SortBuffer& orig;
};

void test_algo()
{
    SortBuffer mergeBuf;
    mergeBuf.PrepareMerge();

    EH_STD::cerr << "EH test : testing algo.h" << EH_STD::endl;