1 #ifndef BM__H__INCLUDED__
2 #define BM__H__INCLUDED__
30 # include <initializer_list>
37 #pragma warning( push )
38 #pragma warning( disable : 4311 4312 4127)
47 # define BM_DECLARE_TEMP_BLOCK(x) bm::bit_block_t x;
106 template<class Alloc>
155 position_(ref.position_)
157 bv_.set(position_, ref.bv_.get_bit(position_));
162 return bv_.get_bit(position_);
167 bv_.set(position_, (bool)ref);
173 bv_.set(position_, value);
179 return bool(*this) == bool(ref);
185 bv_.set_bit_and(position_, value);
192 if (value != bv_.get_bit(position_))
194 bv_.set_bit(position_);
202 bv_.set(position_, value != bv_.get_bit(position_));
209 return !bv_.get_bit(position_);
215 return !bv_.get_bit(position_);
294 if (this->bv_ != ib.bv_) return false;
295 if (this->position_ != ib.position_) return false;
296 if (this->block_ != ib.block_) return false;
297 if (this->block_type_ != ib.block_type_) return false;
298 if (this->block_idx_ != ib.block_idx_) return false;
309 for (unsigned i = 0; i < bd.bit_.cnt; ++i)
497 buf_ = bvect_->blockman_.get_allocator().alloc_bit_block();
514 buf_ = iit.buf_; iit.buf_ = 0;
535 buf_ = ii.buf_; ii.buf_ = 0;
677 return this->valid();
696 static bool decode_wave(block_descr_type* bdescr) BMNOEXCEPT;
697 bool decode_bit_group(block_descr_type* bdescr) BMNOEXCEPT;
698 bool decode_bit_group(block_descr_type* bdescr,
725 bit_count_ = this->valid();
733 this->bit_count_ = 1;
740 this->bit_count_ += this->valid();
748 this->bit_count_ += this->valid();
780 bv.set_allocator_pool(&pool);
785 bv_->set_allocator_pool(0);
792 if (!bv.get_allocator_pool())
796 bv_->set_allocator_pool(&pool);
826 : strat(s), glevel_len(glevels)
861 const Alloc& alloc = Alloc())
862 : blockman_(glevel_len, bv_size, alloc),
863 new_blocks_strat_(strat),
873 const Alloc& alloc = Alloc())
874 : blockman_(glevel_len, bv_size, alloc),
875 new_blocks_strat_(strat),
883 : blockman_(bvect.blockman_),
884 new_blocks_strat_(bvect.new_blocks_strat_),
894 : blockman_(bvect.blockman_.glevel_len_, bvect.blockman_.max_bits_, bvect.blockman_.alloc_),
895 new_blocks_strat_(bvect.new_blocks_strat_),
898 if (!bvect.blockman_.is_init())
902 copy_range_no_check(bvect, left, right);
919 blockman_.deinit_tree();
920 blockman_.copy(bvect.blockman_);
932 blockman_.move_from(bvect.blockman_);
934 new_blocks_strat_ = bvect.new_blocks_strat_;
942 new_blocks_strat_(BM_BIT),
946 std::initializer_list<size_type>::const_iterator it_start = il.begin();
947 std::initializer_list<size_type>::const_iterator it_end = il.end();
948 for (; it_start < it_end; ++it_start)
1017 { return blockman_.get_allocator(); }
1023 { blockman_.get_allocator().set_pool(pool_ptr); }
1028 { return blockman_.get_allocator().get_pool(); }
1219 void clear(bool free_mem = false) { blockman_.set_all_zero(free_mem); }
1245 insert_iterator inserter() { return insert_iterator(*this); }
1541 { return (++prev == bm::id_max) ? 0 : check_or_next(prev); }
1552 return (++prev == bm::id_max) ? 0 : check_or_next_extract(prev);
1819 { return new_blocks_strat_; }
1835 statistics* stat = 0);
1925 { return blockman_; }
1933 { return blockman_; }
1979 bool and_bit_no_check(size_type n, bool val);
1981 bool set_bit_conditional_impl(size_type n, bool val, bool condition);
1994 bool combine_operation_block_or(unsigned i,
1998 bool combine_operation_block_xor(unsigned i,
2002 bool combine_operation_block_and(unsigned i,
2006 bool combine_operation_block_sub(unsigned i,
2012 void combine_operation_block_or(unsigned i,
2017 void combine_operation_block_xor(unsigned i,
2022 void combine_operation_block_and(unsigned i,
2027 void combine_operation_block_sub(unsigned i,
2046 void clear_range_no_check(size_type left,
2052 void keep_range_no_check(size_type left,
2061 unsigned nbit_right,
2077 template<class Alloc>
2088 template<class Alloc>
2099 template<class Alloc>
2110 template<class Alloc>
2122 template<typename Alloc>
2125 if (!blockman_.is_init())
2126 blockman_.init_tree();
2131 template<typename Alloc>
2136 blockman_.move_from(bvect.blockman_);
2137 size_ = bvect.size_;
2138 new_blocks_strat_ = bvect.new_blocks_strat_;
2144 template<class Alloc>
2147 if (!blockman_.is_init())
2153 keep_range_no_check(left, right);
2157 template<typename Alloc>
2162 if (!blockman_.is_init())
2184 set_range_no_check(left, right);
2186 clear_range_no_check(left, right);
2193 template<typename Alloc>
2196 if (!blockman_.is_init())
2199 word_t*** blk_root = blockman_.top_blocks_root();
2203 unsigned top_blocks = blockman_.top_block_size();
2204 for (unsigned i = 0; i < top_blocks; ++i)
2213 blk_blk = blk_root[i];
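// accumulate bit counts of four consecutive sub-blocks per iteration (loop unrolled by 4)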
2227 cnt += blockman_.block_bitcount(blk_blk[j]);
2229 cnt += blockman_.block_bitcount(blk_blk[j+1]);
2231 cnt += blockman_.block_bitcount(blk_blk[j+2]);
2233 cnt += blockman_.block_bitcount(blk_blk[j+3]);
2243 template<typename Alloc>
2246 word_t*** blk_root = blockman_.top_blocks_root();
2249 typename blocks_manager_type::block_any_func func(blockman_);
2255 template<typename Alloc>
2258 if (size_ == new_size) return;
2259 if (size_ < new_size)
2261 if (!blockman_.is_init())
2262 blockman_.init_tree();
2264 blockman_.reserve(new_size);
2276 template<typename Alloc>
2283 if (found && last >= size_)
2289 template<typename Alloc>
2304 if (!blockman_.is_init())
2317 unsigned real_top_blocks = blockman_.find_real_top_blocks();
2318 unsigned max_top_blocks = blockman_.find_max_top_blocks();
2321 rs_idx->set_total(nb + 1);
2322 rs_idx->resize(nb + 1);
2323 rs_idx->resize_effective_super_blocks(real_top_blocks);
2327 BM_ASSERT(max_top_blocks <= blockman_.top_block_size());
2328 bm::word_t*** blk_root = blockman_.top_blocks_root();
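// walk every top-level (super) block and register its per-block bit counts with the rank-select index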
2329 for (unsigned i = 0; i < max_top_blocks; ++i)
2334 rs_idx->set_null_super_block(i);
2339 rs_idx->set_full_super_block(i);
2344 bv_blocks->set_range_no_check(nb_from, nb_to);
2355 bcount[j] = sub_count[j] = 0;
2359 unsigned cnt = blockman_.block_bitcount(block);
2361 if (bv_blocks && cnt)
2367 unsigned local_first, local_second;
2389 BM_ASSERT(cnt >= local_first + local_second);
2390 sub_count[j] = local_first | (local_second << 16);
2394 rs_idx->register_super_block(i, &bcount[0], &sub_count[0]);
2403 template<typename Alloc>
2407 bm::word_t*** blk_root = blockman_.top_blocks_root();
2410 typename blocks_manager_type::block_count_arr_func func(blockman_, &(arr[0]));
2412 return func.last_block();
2417 template<typename Alloc>
2421 unsigned nbit_right,
2425 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
2427 unsigned sub_cnt = rs_idx.sub_count(nb);
2428 unsigned first = sub_cnt & 0xFFFF;
2429 unsigned second = sub_cnt >> 16;
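// sub_count() packs two 16-bit counters: low half = bits in the first half of the block, high half = bits in the second half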
2473 unsigned bc_second_range = first + second;
2477 c = bc_second_range;
2485 c = bc_second_range - c;
2491 unsigned bc_second_range = first + second;
2499 c += bc_second_range;
2506 c = rs_idx.count(nb);
2531 template<typename Alloc>
2537 if (!blockman_.is_init())
2546 if (nblock_right >= rs_idx.get_total())
2548 cnt = rs_idx.count();
2551 cnt = nblock_right ? rs_idx.rcount(nblock_right-1) : 0;
2555 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2571 cnt += nbit_right + 1;
2576 this->block_count_to(block, nblock_right, nbit_right, rs_idx);
2585 template<typename Alloc>
2591 if (!blockman_.is_init())
2599 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2618 cnt = nbit_right + 1;
2623 unsigned w = block[nword];
2627 cnt = block_count_to(block, nblock_right, nbit_right, rs_idx);
2636 cnt += nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
2642 template<typename Alloc>
2648 if (!blockman_.is_init())
2654 size_type cnt = nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
2658 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2675 cnt += block_count_to(block, nblock_right, nbit_right, rs_idx);
2687 template<typename Alloc>
2697 if (!blockman_.is_init())
2708 const bm::word_t* block = blockman_.get_block(i0, j0);
2718 typename blocks_manager_type::block_count_func func(blockman_);
2741 cnt += func.count();
2742 if (nblock_left == nblock_right)
2750 word_t*** blk_root = blockman_.top_blocks_root();
2754 nblock_left+1, nblock_right-1, func);
2755 cnt += func.count();
2759 block = blockman_.get_block(i0, j0);
2780 template<typename Alloc>
2784 if (!blockman_.is_init())
2801 const bm::word_t* block = blockman_.get_block(i0, j0);
2803 if (nblock_left == nblock_right)
2823 block = blockman_.get_block(i0, j0);
2833 if (nblock_left <= nblock_right)
2835 unsigned i_from, j_from, i_to, j_to;
2839 bm::word_t*** blk_root = blockman_.top_blocks_root();
2841 for (unsigned i = i_from; i <= i_to; ++i)
2849 unsigned j = (i == i_from) ? j_from : 0;
2856 }
while (++j < j_limit);
2864 template<
typename Alloc>
2869 if (!blockman_.is_init())
2884 const bm::word_t* block = blockman_.get_block(i0, j0);
2886 if (nblock_left == nblock_right)
2906 block = blockman_.get_block(i0, j0);
2916 if (nblock_left <= nblock_right)
2918 unsigned i_from, j_from, i_to, j_to;
2922 bm::word_t*** blk_root = blockman_.top_blocks_root();
2925 if (i_from >= top_size)
2927 if (i_to >= top_size)
2929 i_to = unsigned(top_size-1);
2934 for (unsigned i = i_from; i <= i_to; ++i)
2942 unsigned j = (i == i_from) ? j_from : 0;
2949 }
while (++j < j_limit);
2957 template<
typename Alloc>
2971 return this->
test(left);
2975 cnt_l = this->
count_to(left-1, rs_idx);
2979 cnt_r = this->
count_to(right, rs_idx);
2981 return cnt_r - cnt_l;
2986 template<typename Alloc>
2993 bm::word_t*** blk_root = blockman_.top_blocks_root();
2994 for (unsigned i = 0; i < top_blocks; ++i)
3015 blockman_.set_block_ptr(i, j, 0);
3036 template<typename Alloc>
3046 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3061 unsigned w = block[nword];
3070 template<typename Alloc>
3075 if (!blockman_.is_init())
3082 temp_block = blockman_.check_allocate_tempblock();
3087 ::memcpy(stat->gap_levels,
3089 stat->max_serialize_mem = (unsigned)sizeof(bm::id_t) * 4;
3092 blockman_.optimize_tree(temp_block, opt_mode, stat);
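// pad the worst-case serialization estimate by roughly 10% (at least 256 bytes) as a safety margin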
3096 size_t safe_inc = stat->max_serialize_mem / 10;
3097 if (!safe_inc) safe_inc = 256;
3098 stat->max_serialize_mem += safe_inc;
3100 stat->memory_used += (unsigned)(sizeof(*this) - sizeof(blockman_));
3102 unsigned top_size = blockman_.top_block_size();
3103 size_t blocks_mem = sizeof(blockman_);
3104 blocks_mem += sizeof(bm::word_t**) * top_size;
3106 stat->memory_used += blocks_mem;
3111 blockman_.free_temp_block();
3116 template<typename Alloc>
3120 if (!blockman_.is_init())
3123 struct bvector<Alloc>::statistics st;
3130 ::memcpy(opt_glen, st.gap_levels, bm::gap_levels * sizeof(*opt_glen));
3133 st.gap_length + st.gap_blocks,
3142 template<typename Alloc>
3145 if (blockman_.is_init())
3147 word_t*** blk_root = blockman_.top_blocks_root();
3149 blocks_manager_type::gap_level_func gl_func(blockman_, glevel_len);
3153 blockman_.set_glen(glevel_len);
3158 template<typename Alloc>
3162 unsigned top_blocks = blockman_.top_block_size();
3163 unsigned bvect_top_blocks = bv.blockman_.top_block_size();
3165 if (bvect_top_blocks > top_blocks) top_blocks = bvect_top_blocks;
3167 for (unsigned i = 0; i < top_blocks; ++i)
3169 const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
3170 const bm::word_t* const* arg_blk_blk = bv.blockman_.get_topblock(i);
3172 if (blk_blk == arg_blk_blk)
3182 arg_blk = arg_blk_blk ? arg_blk_blk[j] : 0;
3190 blk = blk_blk ? blk_blk[j] : 0;
3194 if (blk == arg_blk) continue;
3199 if (!blk || !arg_blk)
3276 template<typename Alloc>
3281 unsigned top_blocks = blockman_.top_block_size();
3282 bm::word_t*** top_root = blockman_.top_blocks_root();
3284 if (!top_blocks || !top_root)
3289 unsigned i_to, j_to;
3291 unsigned bvect_top_blocks = bvect.blockman_.top_block_size();
3292 if (!bvect_top_blocks || !arg_top_root)
3294 bool f = this->find(pos);
3297 if (pos > search_to)
3303 if (bvect_top_blocks > top_blocks)
3304 top_blocks = bvect_top_blocks;
3308 if (i_to < top_blocks)
3309 top_blocks = i_to+1;
3311 for (unsigned i = 0; i < top_blocks; ++i)
3313 const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
3314 const bm::word_t* const* arg_blk_blk = bvect.blockman_.get_topblock(i);
3316 if (blk_blk == arg_blk_blk)
3360 if (pos > search_to)
3380 template<typename Alloc>
3385 blockman_.swap(bvect.blockman_);
3392 template<typename Alloc>
3399 ::memcpy(st->gap_levels,
3402 st->max_serialize_mem = unsigned(sizeof(bm::id_t) * 4);
3403 unsigned top_size = blockman_.top_block_size();
3405 size_t blocks_mem = sizeof(blockman_);
3408 blocks_mem += sizeof(bm::word_t**) * top_size;
3409 bm::word_t*** blk_root = blockman_.top_blocks_root();
3413 for (unsigned i = 0; i < top_size; ++i)
3415 const bm::word_t* const* blk_blk = blk_root[i];
3422 blk_blk = blk_root[i];
3429 st->ptr_sub_blocks++;
3440 st->add_gap_block(cap, len);
3443 st->add_bit_block();
3448 size_t full_null_size = blockman_.calc_serialization_null_full();
3449 st->max_serialize_mem += full_null_size;
3453 size_t safe_inc = st->max_serialize_mem / 10;
3454 if (!safe_inc) safe_inc = 256;
3455 st->max_serialize_mem += safe_inc;
3458 st->memory_used += unsigned(sizeof(*this) - sizeof(blockman_));
3460 st->memory_used += blocks_mem;
3467 template<class Alloc>
3481 blockman_.check_allocate_block(nblock,
3490 this->gap_block_set_no_ret(BMGAP_PTR(blk), val, nblock, nbit);
3496 blk[nword] |= (1u << nbit);
3502 template<class Alloc>
3506 if (!ids || !ids_size)
3508 if (!blockman_.is_init())
3509 blockman_.init_tree();
3511 import(ids, ids_size, so);
3517 template<class Alloc>
3521 if (!ids || !ids_size || !blockman_.is_init())
3527 bv_tmp.import(ids, ids_size, so);
3545 template<class Alloc>
3549 if (!ids || !ids_size || !blockman_.is_init())
3554 bv_tmp.import(ids, ids_size, so);
3571 template<class Alloc>
3580 template<class Alloc>
3589 template<class Alloc>
3592 if (val == condition) return false;
3598 return set_bit_conditional_impl(n, val, condition);
3603 template<class Alloc>
3609 if (!blockman_.is_init())
3610 blockman_.init_tree();
3611 return and_bit_no_check(n, val);
3616 template<class Alloc>
3621 if (!blockman_.is_init())
3622 blockman_.init_tree();
3633 template<class Alloc>
3649 if (nblock == nblock_end)
3679 }
while (start < size_in);
3684 template<
class Alloc>
3693 blockman_.check_allocate_block(nblock, 1, 0, &block_type,
3700 blk = blockman_.deoptimize_block(nblock);
3715 template<class Alloc>
3723 blockman_.check_allocate_block(nblock,
3735 return gap_block_set(BMGAP_PTR(blk), val, nblock, nbit);
3746 val = ~(*word & mask);
3752 val = ~(*word & mask);
3762 template<class Alloc>
3767 unsigned is_set, new_len, old_len;
3770 if (old_len < new_len)
3772 unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
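// the GAP block outgrew the threshold for its current level; extend it to a larger capacity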
3773 if (new_len > threshold)
3774 blockman_.extend_gap_block(nblock, gap_blk);
3781 template<class Alloc>
3782 void bvector<Alloc>::gap_block_set_no_ret(bm::gap_word_t* gap_blk,
3785 unsigned new_len, old_len;
3788 if (old_len < new_len)
3790 unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
3791 if (new_len > threshold)
3792 blockman_.extend_gap_block(nblock, gap_blk);
3799 template<class Alloc>
3805 blockman_.check_allocate_block(nblock,
3816 this->gap_block_set(gap_blk, !is_set, nblock, nbit);
3825 is_set = ((*word) & mask);
3827 *word = (is_set) ? (*word & ~mask) : (*word | mask);
3834 template<class Alloc>
3843 blockman_.check_allocate_block(nblock,
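// block_type == 1: the target block is GAP (run-length) compressed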
3853 if (block_type == 1)
3858 if (old_val != condition)
3865 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3877 bool is_set = ((*word) & mask) != 0;
3879 if (is_set != condition)
3903 template<class Alloc>
3904 bool bvector<Alloc>::and_bit_no_check(size_type n, bool val)
3911 blockman_.check_allocate_block(nblock,
3921 if (block_type == 1)
3926 bool new_val = val & old_val;
3927 if (new_val != old_val)
3929 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3941 bool is_set = ((*word) & mask) != 0;
3943 bool new_val = is_set & val;
3962 template<class Alloc>
3971 pos = check_or_next(from);
3977 template<class Alloc>
3982 unsigned top_blocks = blockman_.top_block_size();
3985 for (unsigned i = top_blocks-1; true; --i)
3987 const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
4016 pos = base_idx + block_pos;
4034 template<class Alloc>
4039 unsigned top_blocks = blockman_.top_block_size();
4040 for (unsigned i = 0; i < top_blocks; ++i)
4042 const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
4056 found = true; block_pos = 0;
4068 pos = base_idx + block_pos;
4080 template<class Alloc>
4084 bool found = find(in_first);
4093 in_first = in_last = 0;
4100 template<class Alloc>
4109 if (!rank_in || !blockman_.is_init())
4114 unsigned bit_pos = 0;
4119 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4124 unsigned cnt = blockman_.block_bitcount(block);
4153 template<class Alloc>
4164 !blockman_.is_init() ||
4165 (rs_idx.count() < rank_in))
4173 nb = rs_idx.find(rank_in);
4174 BM_ASSERT(rs_idx.rcount(nb) >= rank_in);
4176 rank_in -= rs_idx.rcount(nb-1);
4180 unsigned bit_pos = 0;
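// walk blocks from nb onward, consuming the remaining rank until it falls inside a single block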
4182 for (; nb < rs_idx.get_total(); ++nb)
4185 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4190 unsigned block_bc = rs_idx.count(nb);
4191 if (rank_in <= block_bc)
4193 nbit = rs_idx.select_sub_range(nb, rank_in);
4199 rank_in -= block_bc;
4224 template<class Alloc>
4231 !blockman_.is_init() ||
4232 (rs_idx.count() < rank_in))
4238 bool found = rs_idx.find(&rank_in, &nb, &sub_range_from);
4244 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4251 unsigned bit_pos = 0;
4260 template<class Alloc>
4264 if (!blockman_.is_init())
4271 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4301 for (; i < top_blocks; ++i)
4303 const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
4318 found = true; block_pos = 0;
4330 prev = base_idx + block_pos;
4344 template<class Alloc>
4346 bvector<Alloc>::check_or_next_extract(size_type prev)
4348 if (!blockman_.is_init())
4351 size_type pos = this->check_or_next(prev);
4359 template<class Alloc>
4367 template<class Alloc>
4370 bool b = this->test(0);
4377 template<class Alloc>
4384 if (!blockman_.is_init())
4403 bm::word_t* block = blockman_.get_block_ptr(i, j);
4405 if (!block && !value)
4410 block = blockman_.check_allocate_block(nb, BM_BIT);
4412 block = blockman_.deoptimize_block(nb);
4425 unsigned top_blocks = blockman_.top_block_size();
4426 bm::word_t*** blk_root = blockman_.top_blocks_root();
4432 if (i >= top_blocks)
4439 blk_blk = blk_root[i];
4450 blockman_.check_allocate_block(nblock, 0, 0, &block_type, false);
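// fold the bit carried out of the previous block into bit 0 of this block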
4451 block[0] |= carry_over;
4454 blk_root = blockman_.top_blocks_root();
4455 blk_blk = blk_root[i];
4456 top_blocks = blockman_.top_block_size();
4467 blk_blk = blockman_.check_alloc_top_subblock(i);
4481 carry_over = 0; block = 0;
4486 if (0 != (block = blk_blk[j]))
4501 block = blockman_.deoptimize_block(nblock);
4502 block[0] <<= (carry_over = 1);
4511 block = blockman_.deoptimize_block(nblock);
4515 unsigned new_block_len;
4519 unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
4520 if (new_block_len > threshold)
4522 blockman_.extend_gap_block(nblock, gap_blk);
4538 blockman_.zero_block(nblock);
4542 blockman_.zero_block(nblock);
4554 template<class Alloc>
4559 if (!blockman_.is_init())
4571 bm::word_t* block = blockman_.get_block_ptr(i, j);
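// the first bit of the following block shifts down into the top of this block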
4572 bool carry_over = test_first_block_bit(nb+1);
4577 block = blockman_.check_allocate_block(nb, BM_BIT);
4584 block = blockman_.deoptimize_block(nb);
4596 unsigned top_blocks = blockman_.top_block_size();
4597 bm::word_t*** blk_root = blockman_.top_blocks_root();
4603 if (i >= top_blocks)
4606 blk_blk = blk_root[i];
4618 bool carry_over = 0;
4622 carry_over = this->test(co_idx);
4626 blk_blk = blockman_.check_alloc_top_subblock(i);
4630 blk_blk = blockman_.check_alloc_top_subblock(i);
4638 bool carry_over = 0;
4643 bool no_blocks = (j == 0);
4646 if (0 != (block = blk_blk[j]))
4656 blockman_.zero_block(i, j-1);
4664 carry_over = test_first_block_bit(nblock+1);
4669 block = blockman_.deoptimize_block(nblock);
4677 carry_over = test_first_block_bit(nblock+1);
4678 unsigned new_block_len;
4682 unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
4683 if (new_block_len > threshold)
4684 blockman_.extend_gap_block(nblock, gap_blk);
4688 blockman_.zero_block(i, j);
4696 blockman_.zero_block(i, j);
4699 if (carry_over && nblock)
4712 template<class Alloc>
4723 template<class Alloc>
4726 if (!bv.blockman_.is_init())
4732 unsigned top_blocks = blockman_.top_block_size();
4733 if (size_ < bv.size_)
4737 unsigned arg_top_blocks = bv.blockman_.top_block_size();
4738 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
4741 bm::word_t*** blk_root = blockman_.top_blocks_root();
4742 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
4744 for (unsigned i = 0; i < top_blocks; ++i)
4747 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
4748 if (blk_blk == blk_blk_arg || !blk_blk_arg)
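// destination has no sub-block here: take over the argument's sub-block and zero the source slot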
4750 if (!blk_blk && blk_blk_arg)
4754 blk_root[i] = blk_root_arg[i];
4755 blk_root_arg[i] = 0;
4762 blockman_.deallocate_top_subblock(i);
4772 blk = blk_blk[j]; arg_blk = blk_blk_arg[j];
4775 if (!blk && arg_blk)
4777 blockman_.set_block_ptr(i, j, arg_blk);
4778 bv.blockman_.set_block_ptr(i, j, 0);
4782 combine_operation_block_or(i, j, blk, arg_blk);
4791 template<class Alloc>
4799 bm::word_t* blk = blockman_.get_block_ptr(i0, j0);
4807 template<class Alloc>
4813 if (blockman_.is_init())
4814 blockman_.deinit_tree();
4835 unsigned top_blocks1 = bman1.top_block_size();
4836 unsigned top_blocks2 = bman2.top_block_size();
4837 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4838 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4841 if (size_ < bv2.size_)
4844 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4850 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
4857 for (unsigned i = 0; i < top_blocks; ++i)
4859 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4860 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4862 if (blk_blk_arg1 == blk_blk_arg2)
4865 bm::word_t*** blk_root = blockman_.top_blocks_root();
4866 blk_root[i] = blk_blk_arg1;
4872 bm::word_t*** blk_root = blockman_.top_blocks_root();
4876 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
4877 bool any_blocks = false;
4881 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4882 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4883 if (arg_blk1 == arg_blk2 && !arg_blk1)
4885 bool need_opt = combine_operation_block_or(i, j, arg_blk1, arg_blk2);
4887 blockman_.optimize_bit_block(i, j);
4888 any_blocks |= bool(blk_blk[j]);
4892 blockman_.free_top_subblock(i);
4897 blockman_.free_temp_block();
4904 template<class Alloc>
4910 if (blockman_.is_init())
4911 blockman_.deinit_tree();
4928 if (!bman1.is_init())
4934 if (!bman2.is_init())
4940 unsigned top_blocks1 = bman1.top_block_size();
4941 unsigned top_blocks2 = bman2.top_block_size();
4942 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4943 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4946 if (size_ < bv2.size_)
4949 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4950 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
4952 for (unsigned i = 0; i < top_blocks; ++i)
4954 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4955 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4957 if (blk_blk_arg1 == blk_blk_arg2)
4962 blockman_.deallocate_top_subblock(i);
4970 bm::word_t*** blk_root = blockman_.top_blocks_root();
4983 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
4984 bool any_blocks = false;
4989 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4990 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4992 if ((arg_blk1 == arg_blk2) &&
4996 bool need_opt = combine_operation_block_xor(i, j, arg_blk1, arg_blk2);
4998 blockman_.optimize_bit_block(i, j);
4999 any_blocks |= bool(blk_blk[j]);
5003 blockman_.free_top_subblock(i);
5008 blockman_.free_temp_block();
5015 template<class Alloc>
5036 if (blockman_.is_init())
5037 blockman_.deinit_tree();
5041 if (!bman1.is_init() || !bman2.is_init())
5044 unsigned top_blocks1 = bman1.top_block_size();
5045 unsigned top_blocks2 = bman2.top_block_size();
5046 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5047 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5050 if (size_ < bv2.size_)
5053 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5054 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5056 for (unsigned i = 0; i < top_blocks; ++i)
5058 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5059 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5061 if (blk_blk_arg1 == blk_blk_arg2)
5067 bm::word_t*** blk_root = blockman_.top_blocks_root();
5077 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5078 bool any_blocks = false;
5083 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5084 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5086 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5089 bool need_opt = combine_operation_block_and(i, j, arg_blk1, arg_blk2);
5091 blockman_.optimize_bit_block(i, j);
5092 any_blocks |= bool(blk_blk[j]);
5096 blockman_.free_top_subblock(i);
5101 blockman_.free_temp_block();
5109 template<class Alloc>
5115 if (blockman_.is_init())
5116 blockman_.deinit_tree();
5134 if (!bman1.is_init())
5138 if (!bman2.is_init())
5144 unsigned top_blocks1 = bman1.top_block_size();
5145 unsigned top_blocks2 = bman2.top_block_size();
5146 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5147 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5150 if (size_ < bv2.size_)
5153 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5154 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5156 for (unsigned i = 0; i < top_blocks; ++i)
5158 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5159 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5161 if (blk_blk_arg1 == blk_blk_arg2)
5168 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5169 bool any_blocks = false;
5173 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5174 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5175 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5178 bool need_opt = combine_operation_block_sub(i, j, arg_blk1, arg_blk2);
5180 blockman_.optimize_bit_block(i, j);
5181 any_blocks |= bool(blk_blk[j]);
5185 blockman_.free_top_subblock(i);
5190 blockman_.free_temp_block();
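// helper macro: OR one pair of blocks at offset x within the current four-block wave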
5198 #define BM_OR_OP(x) \
5200 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
5201 if (blk != arg_blk) \
5202 combine_operation_block_or(i, j+x, blk, arg_blk); \
5205 template<class Alloc>
5208 if (!bv.blockman_.is_init())
5211 unsigned top_blocks = blockman_.top_block_size();
5212 if (size_ < bv.size_)
5215 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5216 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5218 bm::word_t*** blk_root = blockman_.top_blocks_root();
5219 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5221 for (unsigned i = 0; i < top_blocks; ++i)
5224 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5225 if (blk_blk == blk_blk_arg || !blk_blk_arg)
5231 blockman_.deallocate_top_subblock(i);
5236 blk_blk = blockman_.alloc_top_subblock(i);
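// SIMD fast path: a wave where destination and argument block pointers already match needs no OR work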
5243 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5244 if (!avx2_test_all_eq_wave2(blk_blk + j, blk_blk_arg + j))
5252 #elif defined(BM64_SSE4)
5271 #define BM_XOR_OP(x) \
5273 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
5274 combine_operation_block_xor(i, j+x, blk, arg_blk); \
5277 template<class Alloc>
5280 if (!bv.blockman_.is_init())
5282 if (!blockman_.is_init())
5288 unsigned top_blocks = blockman_.top_block_size();
5289 if (size_ < bv.size_)
5293 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5294 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5296 bm::word_t*** blk_root = blockman_.top_blocks_root();
5297 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5299 for (unsigned i = 0; i < top_blocks; ++i)
5301 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5305 if (blk_blk == blk_blk_arg)
5315 blk_blk = blockman_.check_alloc_top_subblock(i);
5328 blk_blk = blockman_.alloc_top_subblock(i);
5335 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5336 if (!avx2_test_all_zero_wave2(blk_blk + j, blk_blk_arg + j))
5344 #elif defined(BM64_SSE4)
5364 #define BM_AND_OP(x) if (0 != (blk = blk_blk[j+x])) \
5366 if (0 != (arg_blk = blk_blk_arg[j+x])) \
5368 combine_operation_block_and(i, j+x, blk, arg_blk); \
5369 if (opt_mode == opt_compress) \
5370 blockman_.optimize_bit_block(i, j+x); \
5373 blockman_.zero_block(i, j+x); \
5376 template<class Alloc>
5380 if (!blockman_.is_init())
5382 if (!bv.blockman_.is_init())
5388 unsigned top_blocks = blockman_.top_block_size();
5389 if (size_ < bv.size_)
5392 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5393 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5396 bm::word_t*** blk_root = blockman_.top_blocks_root();
5397 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5399 for (unsigned i = 0; i < top_blocks; ++i)
5404 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5414 blockman_.zero_block(i, j);
5415 blockman_.deallocate_top_subblock(i);
5423 blk_blk = blockman_.check_alloc_top_subblock(i);
5430 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5431 if (!avx2_test_all_zero_wave(blk_blk + j))
5439 #elif defined(BM64_SSE4)
5459 #define BM_SUB_OP(x) \
5460 if ((0 != (blk = blk_blk[j+x])) && (0 != (arg_blk = blk_blk_arg[j+x]))) \
5461 combine_operation_block_sub(i, j+x, blk, arg_blk);
5464 template<class Alloc>
5467 if (!blockman_.is_init() || !bv.blockman_.is_init())
5470 unsigned top_blocks = blockman_.top_block_size();
5471 if (size_ < bv.size_)
5474 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5475 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5477 bm::word_t*** blk_root = blockman_.top_blocks_root();
5478 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5480 for (unsigned i = 0; i < top_blocks; ++i)
5483 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5484 if (!blk_blk || !blk_blk_arg)
5489 blockman_.deallocate_top_subblock(i);
5493 blk_blk = blockman_.check_alloc_top_subblock(i);
5500 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5501 if (!avx2_test_all_zero_wave(blk_blk + j))
5509 #elif defined(BM64_SSE4)
5528 template<class Alloc>
5533 if (!blockman_.is_init())
5537 blockman_.init_tree();
5540 unsigned top_blocks = blockman_.top_block_size();
5541 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5543 if (arg_top_blocks > top_blocks)
5544 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5546 if (size_ < bv.size_)
5550 blockman_.reserve_top_blocks(arg_top_blocks);
5551 top_blocks = blockman_.top_block_size();
5554 if (size_ > bv.size_)
5559 if (arg_top_blocks < top_blocks)
5562 top_blocks = arg_top_blocks;
5567 bm::word_t*** blk_root = blockman_.top_blocks_root();
5568 unsigned block_idx = 0;
5572 top_blocks = blockman_.top_block_size();
5573 if (top_blocks < bv.blockman_.top_block_size())
5577 top_blocks = bv.blockman_.top_block_size();
5581 for (i = 0; i < top_blocks; ++i)
5591 const bm::word_t* const* bvbb = bv.blockman_.get_topblock(i);
5601 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5619 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5626 blockman_.zero_block(i, j);
5637 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5650 template<class Alloc>
5659 blockman_.clone_assign_block(i, j, arg_blk2);
5664 blockman_.clone_assign_block(i, j, arg_blk1);
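// at least one argument block is GAP-compressed; a GAP+GAP result is built in tmp_buf, mixed GAP/bit falls through below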
5676 if (is_gap1 | is_gap2)
5678 if (is_gap1 & is_gap2)
5684 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5692 arg_block = arg_blk2;
5697 arg_block = arg_blk1;
5700 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5708 bm::word_t* block = blockman_.borrow_tempblock();
5709 blockman_.set_block_ptr(i, j, block);
5715 blockman_.return_tempblock(block);
5723 template<class Alloc>
5724 bool bvector<Alloc>::combine_operation_block_xor(unsigned i,
5733 blockman_.clone_assign_block(i, j, arg_blk2);
5738 blockman_.clone_assign_block(i, j, arg_blk1);
5744 blockman_.clone_assign_block(i, j, arg_blk2, true);
5750 blockman_.clone_assign_block(i, j, arg_blk1, true);
5757 if (is_gap1 | is_gap2)
5759 if (is_gap1 & is_gap2)
5765 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5773 arg_block = arg_blk2;
5778 arg_block = arg_blk1;
5781 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5789 bm::word_t* block = blockman_.borrow_tempblock();
5790 blockman_.set_block_ptr(i, j, block);
5795 blockman_.set_block_ptr(i, j, 0);
5796 blockman_.return_tempblock(block);
5805 template<class Alloc>
5806 bool bvector<Alloc>::combine_operation_block_and(unsigned i,
5813 if (!arg_blk1 || !arg_blk2)
5824 blockman_.clone_assign_block(i, j, arg_blk2);
5829 blockman_.clone_assign_block(i, j, arg_blk1);
5836 if (is_gap1 | is_gap2)
5838 if (is_gap1 & is_gap2)
5844 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5852 arg_block = arg_blk2;
5857 arg_block = arg_blk1;
5860 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5868 bm::word_t* block = blockman_.borrow_tempblock();
5869 blockman_.set_block_ptr(i, j, block);
5874 blockman_.set_block_ptr(i, j, 0);
5875 blockman_.return_tempblock(block);
5884 template<class Alloc>
5885 bool bvector<Alloc>::combine_operation_block_sub(unsigned i,
5896 blockman_.clone_assign_block(i, j, arg_blk1);
5907 if (is_gap1 | is_gap2)
5909 if (is_gap1 & is_gap2)
5915 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5921 bm::word_t* block = blockman_.borrow_tempblock();
5922 blockman_.set_block_ptr(i, j, block);
5927 blockman_.set_block_ptr(i, j, 0);
5928 blockman_.return_tempblock(block);
5934 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_blk1);
5942 bm::word_t* block = blockman_.borrow_tempblock();
5943 blockman_.set_block_ptr(i, j, block);
5948 blockman_.set_block_ptr(i, j, 0);
5949 blockman_.return_tempblock(block);
5958 template<class Alloc>
5959 void bvector<Alloc>::combine_operation_block_or(
5960 unsigned i, unsigned j,
5970 blockman_.zero_block(i, j);
5984 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
5989 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
5993 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
5994 blockman_.set_block_ptr(i, j, new_blk);
6006 blk = blockman_.clone_gap_block(arg_gap, gap);
6007 blockman_.set_block(i, j, blk, gap);
6019 blk = blockman_.alloc_.alloc_bit_block();
6021 blockman_.set_block_ptr(i, j, blk);
6029 blockman_.get_allocator().free_bit_block(blk);
6037 template<class Alloc>
6038 void bvector<Alloc>::combine_operation_block_xor(
6039 unsigned i, unsigned j,
6055 blockman_.set_block_ptr(i, j, 0);
6071 blk = blockman_.get_allocator().alloc_bit_block();
6073 blockman_.set_block_ptr(i, j, blk);
6089 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6094 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6098 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6099 blockman_.set_block_ptr(i, j, new_blk);
6111 blk = blockman_.clone_gap_block(arg_gap, gap);
6112 blockman_.set_block(i, j, blk, gap);
6123 blk = blockman_.alloc_.alloc_bit_block();
6125 blockman_.set_block_ptr(i, j, blk);
6132 blockman_.get_allocator().free_bit_block(blk);
6133 blockman_.set_block_ptr(i, j, 0);
6141 template<class Alloc>
6142 void bvector<Alloc>::combine_operation_block_and(
6164 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6169 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6179 blockman_.get_allocator().free_bit_block(new_blk);
6186 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6187 blockman_.set_block_ptr(i, j, new_blk);
6198 blockman_.zero_block(i, j);
6206 bm::word_t* new_blk = blockman_.clone_gap_block(arg_gap, is_new_gap);
6210 blockman_.set_block_ptr(i, j, new_blk);
6218 blockman_.zero_block(i, j);
6228 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6230 blockman_.set_block_ptr(i, j, new_blk);
6237 blockman_.get_allocator().free_bit_block(blk);
6238 blockman_.set_block_ptr(i, j, 0);
6244 template<class Alloc>
6245 void bvector<Alloc>::combine_operation_block_sub(
6252 blockman_.zero_block(i, j);
6271 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6273 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6277 blk = blockman_.convert_gap2bitset(i, j, gap_blk);
6289 blockman_.zero_block(i, j);
6304 if (!dst || !arg_blk)
6308 if (ret && ret == arg_blk)
6310 ret = blockman_.get_allocator().alloc_bit_block();
6316 blockman_.set_block_ptr(i, j, ret);
6318 blockman_.get_allocator().free_bit_block(dst);
6324 template<class Alloc>
6339 if (!blk && arg_gap)
6341 blk = blockman_.clone_gap_block(BMGAP_PTR(arg_blk), gap);
6342 blockman_.set_block(nb, blk, gap);
6361 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6366 blockman_.zero_block(nb);
6368 blockman_.assign_gap(nb, res, ++res_len, blk, tmp_buf);
6381 blockman_.zero_block(nb);
6390 blk = blockman_.convert_gap2bitset(nb, gap_blk);
6427 if (dst == 0 && arg_blk == 0)
6439 ret = blockman_.get_allocator().alloc_bit_block();
6461 }
while (wrd_ptr < wrd_end);
6471 ret = blockman_.get_allocator().alloc_bit_block();
6478 if (ret && ret == arg_blk)
6480 ret = blockman_.get_allocator().alloc_bit_block();
6491 blockman_.set_block(nb, ret);
6494 blockman_.get_allocator().free_bit_block(dst);
6501 template<class Alloc>
6502 void bvector<Alloc>::set_range_no_check(size_type left,
6528 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6533 block = blockman_.get_block_ptr(i, j);
6540 if (nblock_left == nblock_right)
6552 blockman_.set_all_set(nb, nb_to-1);
6555 if (nb_to > nblock_right)
6559 block = blockman_.get_block_ptr(i, j);
6561 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6575 template<class Alloc>
6576 void bvector<Alloc>::clear_range_no_check(size_type left,
6603 bm::gap_init_range_block<gap_word_t>(tmp_gap_blk,
6608 block = blockman_.get_block_ptr(i, j);
6616 if (nblock_left == nblock_right)
6618 nb = nblock_left + 1;
6628 blockman_.set_all_zero(nb, nb_to - 1u);
6631 if (nb_to > nblock_right)
6635 block = blockman_.get_block_ptr(i, j);
6636 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6652 template<class Alloc>
6657 if (!bvect.blockman_.is_init())
6663 if (blockman_.is_init())
6665 blockman_.deinit_tree();
6670 copy_range_no_check(bvect, left, right);
6675 template<class Alloc>
6683 clear_range_no_check(0, left - 1);
6689 if (found && (last > right))
6690 clear_range_no_check(right + 1, last);
6697 template<class Alloc>
6698 void bvector<Alloc>::copy_range_no_check(const bvector<Alloc>& bvect,
6709 blockman_.copy(bvect.blockman_, nblock_left, nblock_right);
6715 clear_range_no_check(from, left-1);
6721 if (found && (last > right))
6722 clear_range_no_check(right+1, last);
6729 template<class Alloc>
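// with exceptions enabled this throws std::bad_alloc; BM_THROW is the error path for no-exception builds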
6733 throw std::bad_alloc();
6735 BM_THROW(BM_ERR_BADALLOC);
6743 template<class Alloc>
6749 if (this->block_type_)
6759 unsigned val = *(++(bdescr->gap_.ptr));
6760 this->position_ += val - prev;
6765 val = *(++(bdescr->gap_.ptr));
6773 unsigned short idx = ++(bdescr->bit_.idx);
6774 if (idx < bdescr->bit_.cnt)
6782 if (decode_bit_group(bdescr))
6786 if (search_in_blocks())
6796 template<class Alloc>
6802 return this->valid();
6807 switch (this->block_type_)
6812 unsigned short idx = ++(bdescr->bit_.idx);
6813 if (idx < bdescr->bit_.cnt)
6822 if (!decode_bit_group(bdescr, rank))
6837 unsigned int val = *(++(bdescr->gap_.ptr));
6839 this->position_ += val - prev;
6844 val = *(++(bdescr->gap_.ptr));
6855 if (!search_in_blocks())
6862 return this->valid();
6869 template<class Alloc>
6875 return this->valid();
6878 size_type new_pos = this->bv_->check_or_next(pos);
6888 this->position_ = pos;
6891 this->bv_->get_blocks_manager();
6894 this->block_ = bman.get_block(i0, j0);
6898 this->block_type_ = (bool)BM_IS_GAP(this->block_);
6903 if (this->block_type_)
6906 search_in_gapblock();
6908 if (this->position_ == pos)
6909 return this->valid();
6910 this->position_ = pos;
6917 bdescr->gap_.ptr = gptr + gpos;
6933 search_in_bitblock();
6934 return this->valid();
6941 bdescr->bit_.ptr = this->block_ + (nword - parity);
6947 nbit += 32 * parity;
6948 for (unsigned i = 0; i < bdescr->bit_.cnt; ++i)
6951 return this->valid();
6956 return this->valid();
6961 template<class Alloc>
6967 if (!bman->is_init())
6973 bm::word_t*** blk_root = bman->top_blocks_root();
6974 this->block_idx_ = this->position_= 0;
6977 for (i = 0; i < bman->top_block_size(); ++i)
6992 this->block_ = blk_blk[j];
6993 if (this->block_ == 0)
7000 this->block_type_ = 1;
7001 if (search_in_gapblock())
7008 this->block_type_ = 0;
7009 if (search_in_bitblock())
7020 template<class Alloc>
7025 if (bdescr->bit_.cnt)
7027 bdescr->bit_.idx = 0;
7035 template<class Alloc>
7037 bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr) BMNOEXCEPT
7040 for (; bdescr->bit_.ptr < block_end;)
7042 if (decode_wave(bdescr))
7044 bdescr->bit_.pos = this->position_;
7045 this->position_ += bdescr->bit_.bits[0];
7056 template<class Alloc>
7058 bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr,
7062 for (; bdescr->bit_.ptr < block_end;)
7075 if (decode_wave(bdescr))
7077 bdescr->bit_.pos = this->position_;
7078 this->position_ += bdescr->bit_.bits[0];
7090 template<class Alloc>
7091 bool bvector<Alloc>::enumerator::search_in_bitblock() BMNOEXCEPT
7095 block_descr_type* bdescr = &(this->bdescr_);
7096 bdescr->bit_.ptr = this->block_;
7097 return decode_bit_group(bdescr);
7102 template<class Alloc>
7103 bool bvector<Alloc>::enumerator::search_in_gapblock() BMNOEXCEPT
7107 block_descr_type* bdescr = &(this->bdescr_);
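// point the GAP cursor at the block header; its lowest bit encodes the value of the first run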
7108 bdescr->gap_.ptr = BMGAP_PTR(this->block_);
7109 unsigned bitval = *(bdescr->gap_.ptr) & 1;
7111 ++(bdescr->gap_.ptr);
7115 unsigned val = *(bdescr->gap_.ptr);
7119 if (bdescr->gap_.ptr == first)
7121 bdescr->gap_.gap_len = (gap_word_t)(val + 1);
7125 bdescr->gap_.gap_len =
7130 this->position_ += val + 1;
7134 ++(bdescr->gap_.ptr);
7141 template<class Alloc>
7142 bool bvector<Alloc>::enumerator::search_in_blocks() BMNOEXCEPT
7144 ++(this->block_idx_);
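// scan the two-level block directory for the next non-null block, keeping the running block index and position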
7148 bm::word_t*** blk_root = bman.top_blocks_root();
7149 for (; i < top_block_size; ++i)
7157 for (++i; i < top_block_size; ++i)
7164 this->block_idx_ = bn;
7165 this->position_ = pos;
7166 if ((i < top_block_size) && blk_root[i])
7177 this->block_ = blk_blk[j];
7178 if (this->block_ == 0)
7183 this->block_type_ = BM_IS_GAP(this->block_);
7184 if (this->block_type_)
7186 if (search_in_gapblock())
7193 if (search_in_bitblock())
7209 #pragma warning( pop )