Alien-boost-mini


include/boost/container/detail/dispatch_uses_allocator.hpp

   };

   template <class T, class InnerAlloc, class ...Args>
   struct is_constructible_with_allocator_prefix
      : is_constructible<T, allocator_arg_t, InnerAlloc, Args...>
   {};

#else    // #if !defined(BOOST_NO_SFINAE_EXPR) && !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)

   //Without advanced SFINAE expressions we can't use is_constructible,
   //so we fall back to the constructible_with_allocator_xxx traits

   #if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)

   template <class T, class InnerAlloc, class ...Args>
   struct is_constructible_with_allocator_prefix
      : constructible_with_allocator_prefix<T>
   {};

   template <class T, class InnerAlloc, class ...Args>
   struct is_constructible_with_allocator_suffix
      : constructible_with_allocator_suffix<T>
   {};
include/boost/container/detail/node_pool_impl.hpp

      }
   }

   //!Deallocates all the free blocks of memory. Never throws
   void deallocate_free_blocks()
   {
      typedef typename free_nodes_t::iterator nodelist_iterator;
      typename blockslist_t::iterator bit(m_blocklist.before_begin()),
                                      it(m_blocklist.begin()),
                                      itend(m_blocklist.end());
      free_nodes_t backup_list;
      nodelist_iterator backup_list_last = backup_list.before_begin();

      //Execute the algorithm and get an iterator to the last value
      size_type blocksize = (get_rounded_size)
         (m_real_node_size*m_nodes_per_block, (size_type) alignment_of<node_t>::value);

      while(it != itend){
         //Collect all the nodes from the block pointed to by "it"
         //and push them into the list
         free_nodes_t free_nodes;
         nodelist_iterator last_it = free_nodes.before_begin();

include/boost/container/detail/node_pool_impl.hpp

            (is_between(addr, blocksize), push_in_list(free_nodes, last_it));

         //If the number of nodes equals m_nodes_per_block,
         //the whole block is free and can be deallocated
         if(free_nodes.size() == m_nodes_per_block){
            //Unlink the nodes
            free_nodes.clear();
            it = m_blocklist.erase_after(bit);
            mp_segment_mngr_base->deallocate((void*)addr);
         }
         //Otherwise, insert them in the backup list, so that the
         //next "remove_if" pass does not need to check them again.
         else{
            //Assign the iterator to the last value if necessary
            if(backup_list.empty() && !m_freelist.empty()){
               backup_list_last = last_it;
            }
            //Transfer nodes. This is constant time.
            backup_list.splice_after
               ( backup_list.before_begin()
               , free_nodes
               , free_nodes.before_begin()
               , last_it
               , free_nodes.size());
            bit = it;
            ++it;
         }
      }
      //We should have removed all the nodes from the free list
      BOOST_ASSERT(m_freelist.empty());

      //Now pass all the nodes to the free list again
      m_freelist.splice_after
         ( m_freelist.before_begin()
         , backup_list
         , backup_list.before_begin()
         , backup_list_last
         , backup_list.size());
   }
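
The loop above partitions the free list block by block: the nodes living in one block are pulled out, a block whose complete node count turns out to be free is handed back to the segment manager, and the surviving nodes go to a backup list so later iterations do not rescan them. A rough standalone sketch of that pattern, with std::forward_list standing in for Boost's intrusive slist (FakeBlock and the node ids are illustrative):

#include <cstddef>
#include <forward_list>
#include <iostream>

struct FakeBlock { std::size_t first_node, node_count; };

int main()
{
   const std::size_t nodes_per_block = 4;
   //Free node ids: block 0 owns 0..3 (fully free), block 1 owns 4..7 (4 is still in use)
   std::forward_list<std::size_t> free_nodes = {0, 1, 2, 3, 5, 6, 7};
   const FakeBlock blocks[] = { {0, nodes_per_block}, {4, nodes_per_block} };

   std::forward_list<std::size_t> backup;   //nodes of blocks we must keep
   for (const FakeBlock& b : blocks) {
      std::forward_list<std::size_t> from_block;
      std::size_t count = 0;
      //Pull every free node belonging to this block out of the free list
      free_nodes.remove_if([&](std::size_t n) {
         const bool in_block = n >= b.first_node && n < b.first_node + b.node_count;
         if (in_block) { from_block.push_front(n); ++count; }
         return in_block;
      });
      if (count == nodes_per_block) {
         std::cout << "block at node " << b.first_node << " is fully free: deallocate it\n";
      } else {
         //Keep these nodes aside; later blocks' scans won't revisit them
         backup.splice_after(backup.before_begin(), from_block);
      }
   }
   //Finally return the surviving nodes to the free list, as the real code does
   free_nodes.splice_after(free_nodes.before_begin(), backup);
}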

   size_type num_free_nodes()
   {  return m_freelist.size();  }

   //!Deallocates all used memory. Precondition: all nodes allocated from this pool should
   //!already be deallocated. Otherwise, undefined behaviour. Never throws
   void purge_blocks()
   {
      //check for memory leaks

include/boost/container/string.hpp


   void swap_data(basic_string_base& other)
   {
      if(this->is_short()){
         if(other.is_short()){
            repr_t tmp(this->members_.m_repr);
            this->members_.m_repr = other.members_.m_repr;
            other.members_.m_repr = tmp;
         }
         else{
            short_t short_backup(this->members_.m_repr.short_repr());
            this->members_.m_repr.short_repr().~short_t();
            ::new(&this->members_.m_repr.long_repr()) long_t(other.members_.m_repr.long_repr());
            other.members_.m_repr.long_repr().~long_t();
            ::new(&other.members_.m_repr.short_repr()) short_t(short_backup);
         }
      }
      else{
         if(other.is_short()){
            short_t short_backup(other.members_.m_repr.short_repr());
            other.members_.m_repr.short_repr().~short_t();
            ::new(&other.members_.m_repr.long_repr()) long_t(this->members_.m_repr.long_repr());
            this->members_.m_repr.long_repr().~long_t();
            ::new(&this->members_.m_repr.short_repr()) short_t(short_backup);
         }
         else{
            boost::adl_move_swap(this->members_.m_repr.long_repr(), other.members_.m_repr.long_repr());
         }
      }
   }
};

}  //namespace dtl {
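
swap_data above handles the four combinations of the small-string optimization states. The mixed short/long cases cannot simply swap bytes: the short buffer is backed up, the long representation is placement-new'ed over it, and the short copy is then rebuilt on the other side. A much-simplified sketch of one mixed case (Short, Long and Repr are illustrative stand-ins for Boost's short_t/long_t/repr_t):

#include <cstdio>
#include <new>

struct Short { char buf[15]; bool is_short; };
struct Long  { char* ptr;    bool is_short; };
union  Repr  { Short s; Long l; };

//a holds a short (inline) string, b holds a long (heap) one
void swap_short_long(Repr& a, Repr& b)
{
   Short short_backup = a.s;          //back up the inline buffer
   a.s.~Short();                      //end the short object's lifetime
   ::new (&a.l) Long(b.l);            //a now owns the heap representation
   b.l.~Long();
   ::new (&b.s) Short(short_backup);  //b receives the backed-up short buffer
}

int main()
{
   static char heap[] = "heap contents";
   Repr a, b;
   ::new (&a.s) Short{"inline", true};
   ::new (&b.l) Long{heap, false};
   swap_short_long(a, b);
   std::printf("%s / %s\n", a.l.ptr, b.s.buf);   //heap contents / inline
}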

include/boost/intrusive/bstree_algorithms.hpp

   //!
   //! <b>Complexity</b>: Linear.
   static node_ptr rebalance_subtree(const node_ptr & old_root)
   {
      //Taken from:
      //"Tree rebalancing in optimal time and space"
      //Quentin F. Stout and Bette L. Warren

      //To avoid irregularities in the algorithm (old_root can be a
      //left or right child, or even the root of the tree) just put the
      //root as the right child of its parent. Before doing this, back up
      //the information needed to restore the original relationship after
      //the algorithm is applied.
      node_ptr super_root = NodeTraits::get_parent(old_root);
      BOOST_INTRUSIVE_INVARIANT_ASSERT(super_root);

      //Get root info
      node_ptr super_root_right_backup = NodeTraits::get_right(super_root);
      bool super_root_is_header = NodeTraits::get_parent(super_root) == old_root;
      bool old_root_is_right  = is_right_child(old_root);
      NodeTraits::set_right(super_root, old_root);

      std::size_t size;
      subtree_to_vine(super_root, size);
      vine_to_subtree(super_root, size);
      node_ptr new_root = NodeTraits::get_right(super_root);

      //Recover root
      if(super_root_is_header){
         NodeTraits::set_right(super_root, super_root_right_backup);
         NodeTraits::set_parent(super_root, new_root);
      }
      else if(old_root_is_right){
         NodeTraits::set_right(super_root, new_root);
      }
      else{
         NodeTraits::set_right(super_root, super_root_right_backup);
         NodeTraits::set_left(super_root, new_root);
      }
      return new_root;
   }
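
rebalance_subtree is Boost's intrusive take on the Stout-Warren algorithm it cites. A rough self-contained sketch of the two phases, with plain pointers instead of NodeTraits (the pseudo root below plays the role of super_root; this is an illustration, not Boost's implementation): first flatten the subtree into a right-leaning vine using right rotations, then rebuild a balanced tree with rounds of left-rotation "compressions".

#include <cstddef>

struct Node { Node* left = nullptr; Node* right = nullptr; int key = 0; };

//Phase 1: while the current node has a left child, rotate it right.
//Returns the number of nodes in the resulting vine.
std::size_t subtree_to_vine(Node* pseudo_root)
{
   std::size_t size = 0;
   Node* tail = pseudo_root;
   Node* rest = tail->right;
   while (rest) {
      if (!rest->left) {
         tail = rest; rest = rest->right; ++size;
      } else {                      //right rotation around rest
         Node* l = rest->left;
         rest->left = l->right;
         l->right = rest;
         rest = l;
         tail->right = l;
      }
   }
   return size;
}

//One compression round: left-rotate every other node of the vine.
void compress(Node* pseudo_root, std::size_t count)
{
   Node* scan = pseudo_root;
   while (count--) {
      Node* child = scan->right;
      scan->right = child->right;
      scan = scan->right;
      child->right = scan->left;
      scan->left = child;
   }
}

//Phase 2: rebuild a balanced tree from the vine.
void vine_to_subtree(Node* pseudo_root, std::size_t size)
{
   std::size_t p = size + 1;              //round size+1 down to a power of two
   while (p & (p - 1)) p &= p - 1;
   compress(pseudo_root, size + 1 - p);   //absorb the "ragged" bottom level
   for (std::size_t s = p / 2; s > 1; s /= 2)
      compress(pseudo_root, s - 1);
}

int main()
{
   //Degenerate left chain 4-3-2-1-0 hung under the pseudo root
   Node n[5], pseudo_root;
   pseudo_root.right = &n[4];
   for (int i = 4; i > 0; --i) { n[i].key = i; n[i].left = &n[i - 1]; }

   std::size_t size = subtree_to_vine(&pseudo_root);
   vine_to_subtree(&pseudo_root, size);
   //pseudo_root.right now points at the root of a balanced subtree
}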

   //! <b>Effects</b>: Asserts the integrity of the container with additional checks provided by the user.
   //!
   //! <b>Requires</b>: header must be the header of a tree.
   //!
   //! <b>Complexity</b>: Linear time.

include/boost/intrusive/splaytree_algorithms.hpp

      if(leftmost == rightmost){ //Empty or unique node
         if(pfound){
            *pfound = old_root && !comp(key, old_root) && !comp(old_root, key);
         }
         return old_root ? old_root : header;
      }
      else{
         //Initialize "null node" (the header in our case)
         NodeTraits::set_left (header, node_ptr());
         NodeTraits::set_right(header, node_ptr());
         //Class that will back up leftmost/rightmost from the header, commit the assemble(),
         //and restore leftmost/rightmost to the header even if "comp" throws
         detail::splaydown_assemble_and_fix_header<NodeTraits> commit(old_root, header, leftmost, rightmost);
         bool found = false;

         for( ;; ){
            if(comp(key, commit.t_)){
               node_ptr const t_left = NodeTraits::get_left(commit.t_);
               if(!t_left)
                  break;
               if(comp(key, t_left)){
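
The splaydown_assemble_and_fix_header guard created above is what keeps this loop safe when "comp" throws. A minimal sketch of the underlying RAII idea (restore-on-destruction; the real class also commits the assemble step, and all names below are illustrative):

#include <exception>
#include <iostream>
#include <stdexcept>

struct Header { int leftmost, rightmost; };

struct restore_header_guard {
   Header& h;
   Header  backup;
   explicit restore_header_guard(Header& hdr) : h(hdr), backup(hdr) {}
   ~restore_header_guard() { h = backup; }   //runs on normal exit and on throw
};

int main()
{
   Header header{1, 9};
   try {
      restore_header_guard guard(header);
      header.leftmost = -1;                  //header used as scratch during the splay loop
      throw std::runtime_error("comp threw");
   } catch (const std::exception&) {}
   std::cout << header.leftmost << '\n';     //prints 1: the header was restored
}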

include/boost/move/algo/adaptive_sort.hpp

      n_keys = 0u;
      l_build_buf = l_intbuf;
   }
   else{
      //Try to achieve an l_build_buf of length l_intbuf*2, so that we can merge with that
      //l_intbuf*2 buffer in "build_blocks" and use half of the collected elements as buffer
      //and the other half as keys in combine_all_blocks. In that case n_keys >= n_min_ideal_keys,
      //but only by a small margin.
      //
      //If available memory is 2*sqrt(l), then only sqrt(l) unique keys are needed
      //(to be used as keys in combine_all_blocks), as the whole l_build_buf
      //will be backed up in the buffer during build_blocks.
      bool const non_unique_buf = xbuf.capacity() >= l_intbuf;
      size_type const to_collect = non_unique_buf ? n_min_ideal_keys : l_intbuf*2;
      size_type collected = collect_unique(first, first+len, to_collect, comp, xbuf);

      //If available memory is 2*sqrt(l), then for "build_params" 
      //the situation is the same as if 2*l_intbuf were collected.
      if(non_unique_buf && collected == n_min_ideal_keys){
         l_build_buf = l_intbuf;
         n_keys = n_min_ideal_keys;
      }
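
A back-of-the-envelope sketch of the branch above (illustrative only, not Boost.Move's real sizing code; in particular, taking both l_intbuf and n_min_ideal_keys as ceil(sqrt(len)) is a simplifying assumption): with an external buffer of at least l_intbuf elements the build buffer can be mirrored there, so only n_min_ideal_keys unique keys need collecting; without it, the algorithm tries to collect 2*l_intbuf keys and use half as buffer, half as keys.

#include <cmath>
#include <cstddef>
#include <iostream>

int main()
{
   const std::size_t len = 10000;
   const std::size_t l_intbuf = static_cast<std::size_t>(std::ceil(std::sqrt(double(len))));
   const std::size_t n_min_ideal_keys = l_intbuf;   //simplifying assumption

   for (std::size_t xbuf_capacity : {std::size_t(0), 2 * l_intbuf}) {
      const bool non_unique_buf = xbuf_capacity >= l_intbuf;
      const std::size_t to_collect = non_unique_buf ? n_min_ideal_keys : l_intbuf * 2;
      std::cout << "xbuf capacity " << xbuf_capacity
                << " -> try to collect " << to_collect << " unique keys\n";
   }
   //prints: 200 keys without the external buffer, only 100 with it
}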


