1 #ifndef BM__H__INCLUDED__ 2 #define BM__H__INCLUDED__ 30 # include <initializer_list> 37 #pragma warning( push ) 38 #pragma warning( disable : 4311 4312 4127) 47 # define BM_DECLARE_TEMP_BLOCK(x) bm::bit_block_t x; 106 template<
class Alloc>
155 position_(ref.position_)
157 bv_.set(position_, ref.bv_.get_bit(position_));
160 operator bool()
const 162 return bv_.get_bit(position_);
167 bv_.set(position_, (
bool)ref);
173 bv_.set(position_, value);
179 return bool(*
this) == bool(ref);
185 bv_.set_bit_and(position_, value);
192 if (value != bv_.get_bit(position_))
194 bv_.set_bit(position_);
202 bv_.set(position_, value != bv_.get_bit(position_));
209 return !bv_.get_bit(position_);
215 return !bv_.get_bit(position_);
290 if (this->bv_ != ib.
bv_)
return false;
291 if (this->position_ != ib.
position_)
return false;
292 if (this->block_ != ib.
block_)
return false;
293 if (this->block_type_ != ib.
block_type_)
return false;
294 if (this->block_idx_ != ib.
block_idx_)
return false;
299 if (this->block_type_ == 0)
305 for (
unsigned i = 0; i < bd.
bit_.
cnt; ++i)
386 max_bit_(bvect.
size())
392 : bvect_(iit.bvect_),
393 max_bit_(iit.max_bit_)
411 if (n >= bvect_->size())
414 bvect_->resize(new_size);
417 bvect_->set_bit_no_check(n);
467 : bvect_(0), buf_(0), buf_size_(0), sorted_(
BM_UNKNOWN) {}
473 bvect_->blockman_.get_allocator().free_bit_block((
bm::word_t*)buf_);
477 : bvect_(&bvect), sorted_(so)
481 buf_ = (value_type*) bvect_->blockman_.get_allocator().alloc_bit_block();
488 buf_ = bvect_->blockman_.get_allocator().alloc_bit_block();
491 ::memcpy(buf_, iit.
buf_, buf_size_ *
sizeof(*buf_));
495 : bvect_(iit.get_bvector())
497 buf_ = (value_type*) bvect_->blockman_.get_allocator().alloc_bit_block();
505 buf_ = iit.buf_; iit.buf_ = 0;
506 buf_size_ = iit.buf_size_;
507 sorted_ = iit.sorted_;
514 buf_ = bvect_->allocate_tempblock();
516 ::memcpy(buf_, ii.
buf_, buf_size_ *
sizeof(*buf_));
525 bvect_->free_tempblock(buf_);
526 buf_ = ii.buf_; ii.buf_ = 0;
527 buf_size_ = ii.buf_size_;
528 sorted_ = ii.sorted_;
537 if (buf_size_ == buf_size_max())
539 bvect_->import(buf_, buf_size_, sorted_);
542 buf_[buf_size_++] = n;
559 bvect_->import(buf_, buf_size_, sorted_);
632 size_type
value()
const {
return this->position_; }
653 blocks_manager_type* bman = &(this->bv_->blockman_);
654 if (!bman->is_init())
660 bm::word_t*** blk_root = bman->top_blocks_root();
662 this->block_idx_ = this->position_= 0;
665 for (i = 0; i < bman->top_block_size(); ++i)
681 this->block_ = blk_blk[j];
683 if (this->block_ == 0)
691 this->block_type_ = 1;
692 if (search_in_gapblock())
702 this->block_type_ = 0;
703 if (search_in_bitblock())
730 switch (this->block_type_)
735 unsigned short idx = ++(bdescr->
bit_.
idx);
736 if (idx < bdescr->bit_.cnt)
745 if (decode_bit_group(bdescr))
765 unsigned int val = *(++(bdescr->
gap_.
ptr));
767 this->position_ += val - prev;
783 if (search_in_blocks())
808 if (!this->valid() || !rank)
813 switch (this->block_type_)
818 unsigned short idx = ++(bdescr->
bit_.
idx);
819 if (idx < bdescr->bit_.cnt)
828 if (!decode_bit_group(bdescr, rank))
847 unsigned int val = *(++(bdescr->
gap_.
ptr));
849 this->position_ += val - prev;
867 if (!search_in_blocks())
887 size_type new_pos = this->bv_->check_or_next(pos);
897 this->position_ = pos;
900 this->bv_->get_blocks_manager();
902 bman.get_block_coord(nb, i0, j0);
903 this->block_ = bman.get_block(i0, j0);
907 this->block_type_ = (bool)
BM_IS_GAP(this->block_);
912 if (this->block_type_)
915 search_in_gapblock();
917 if (this->position_ == pos)
919 this->position_ = pos;
926 bdescr->
gap_.
ptr = gptr + gpos;
942 search_in_bitblock();
950 bdescr->
bit_.
ptr = this->block_ + (nword - parity);
957 for (
unsigned i = 0; i < bdescr->
bit_.
cnt; ++i)
972 bool decode_wave(block_descr_type* bdescr)
978 bdescr->
bit_.
pos = this->position_;
979 this->position_ += bdescr->
bit_.
bits[0];
985 bool decode_bit_group(block_descr_type* bdescr)
988 for (; bdescr->
bit_.
ptr < block_end;)
990 if (decode_wave(bdescr))
998 bool decode_bit_group(block_descr_type* bdescr, size_type&
rank)
1002 for (; bdescr->
bit_.
ptr < block_end;)
1013 if (decode_wave(bdescr))
1022 bool search_in_bitblock()
1026 block_descr_type* bdescr = &(this->bdescr_);
1027 bdescr->
bit_.
ptr = this->block_;
1029 return decode_bit_group(bdescr);
1032 bool search_in_gapblock()
1036 block_descr_type* bdescr = &(this->bdescr_);
1038 unsigned bitval = *(bdescr->
gap_.
ptr) & 1;
1044 unsigned val = *(bdescr->
gap_.
ptr);
1048 if (bdescr->
gap_.
ptr == first)
1059 this->position_ += val + 1;
1068 bool search_in_blocks()
1070 ++(this->block_idx_);
1071 const blocks_manager_type& bman = this->bv_->blockman_;
1073 block_idx_type top_block_size = bman.top_block_size();
1074 bm::word_t*** blk_root = bman.top_blocks_root();
1075 for (; i < top_block_size; ++i)
1083 for (++i; i < top_block_size; ++i)
1090 this->block_idx_ = bn;
1091 this->position_ = pos;
1092 if ((i < top_block_size) && blk_root[i])
1103 this->block_ = blk_blk[j];
1105 if (this->block_ == 0)
1111 this->block_type_ =
BM_IS_GAP(this->block_);
1112 if (this->block_type_)
1114 if (search_in_gapblock())
1121 if (search_in_bitblock())
1158 this->bit_count_ = 1;
1166 ++(this->bit_count_);
1184 size_type
count()
const {
return bit_count_; }
1190 size_type bit_count_;
1212 bv_->set_allocator_pool(0);
1250 : strat(s), glevel_len(glevels)
1285 const Alloc& alloc = Alloc())
1286 : blockman_(glevel_len, bv_size, alloc),
1287 new_blocks_strat_(strat),
1297 const Alloc& alloc = Alloc())
1298 : blockman_(glevel_len, bv_size, alloc),
1299 new_blocks_strat_(strat),
1307 : blockman_(bvect.blockman_),
1308 new_blocks_strat_(bvect.new_blocks_strat_),
1318 : blockman_(bvect.blockman_.glevel_len_, bvect.blockman_.max_bits_, bvect.blockman_.alloc_),
1319 new_blocks_strat_(bvect.new_blocks_strat_),
1322 if (!bvect.blockman_.is_init())
1326 copy_range_no_check(bvect, left, right);
1343 blockman_.deinit_tree();
1344 blockman_.copy(bvect.blockman_);
1356 blockman_.move_from(
bvect.blockman_);
1357 size_ =
bvect.size_;
1358 new_blocks_strat_ =
bvect.new_blocks_strat_;
1366 new_blocks_strat_(
BM_BIT),
1370 std::initializer_list<size_type>::const_iterator it_start = il.begin();
1371 std::initializer_list<size_type>::const_iterator it_end = il.end();
1372 for (; it_start < it_end; ++it_start)
1417 return reference(*
this, n);
1431 bool operator < (const bvector<Alloc>& bv)
const {
return compare(bv)<0; }
1432 bool operator <= (const bvector<Alloc>& bv)
const {
return compare(bv)<=0; }
1442 return blockman_.get_allocator();
1449 { blockman_.get_allocator().set_pool(pool_ptr); }
1454 {
return blockman_.get_allocator().get_pool(); }
1466 bool set_bit(size_type n,
bool val =
true);
1486 bool inc(size_type n);
1525 void set(
const size_type* ids, size_type ids_size,
1539 void keep(
const size_type* ids, size_type ids_size,
1553 void clear(
const size_type* ids, size_type ids_size,
1616 blockman_.set_all_zero(free_mem);
1647 insert_iterator
inserter() {
return insert_iterator(*
this); }
1657 size_type
capacity()
const {
return blockman_.capacity(); }
1660 size_type
size()
const {
return size_; }
1666 void resize(size_type new_size);
1679 size_type
count()
const;
1702 const rs_index_type& rs_idx)
const;
1713 size_type right)
const;
1737 size_type
count_to(size_type n,
const rs_index_type& rs_idx)
const;
1749 size_type
rank(size_type n,
const rs_index_type& rs_idx)
const 1768 size_type
count_to_test(size_type n,
const rs_index_type& blocks_cnt)
const;
1791 bool get_bit(size_type n)
const;
1827 bool insert(size_type n,
bool value);
1835 void erase(size_type n);
1867 bool find(size_type& pos)
const;
1877 bool find(size_type from, size_type& pos)
const;
1898 {
return (++prev ==
bm::id_max) ? 0 : check_or_next(prev); }
1909 return (++prev ==
bm::id_max) ? 0 : check_or_next_extract(prev);
1942 bool find_rank(size_type
rank, size_type from, size_type& pos)
const;
1961 bool find_rank(size_type rank, size_type from, size_type& pos,
1962 const rs_index_type& rs_idx)
const;
1980 bool select(size_type rank, size_type& pos,
const rs_index_type& rs_idx)
const;
2186 statistics* stat = 0);
2264 void import(
const size_type* ids, size_type ids_size,
2268 block_idx_type nblock, size_type start, size_type stop);
2272 size_type check_or_next(size_type prev)
const;
2276 bool val, block_idx_type nblock,
unsigned nbit);
2281 size_type check_or_next_extract(size_type prev);
2291 bool and_bit_no_check(size_type n,
bool val);
2293 bool set_bit_conditional_impl(size_type n,
bool val,
bool condition);
2306 bool combine_operation_block_or(
unsigned i,
2310 bool combine_operation_block_xor(
unsigned i,
2314 bool combine_operation_block_and(
unsigned i,
2318 bool combine_operation_block_sub(
unsigned i,
2324 void combine_operation_block_or(
unsigned i,
2329 void combine_operation_block_xor(
unsigned i,
2334 void combine_operation_block_and(
unsigned i,
2338 void combine_operation_block_sub(
unsigned i,
2352 void set_range_no_check(size_type left,
2357 void clear_range_no_check(size_type left,
2363 size_type block_count_to(
const bm::word_t* block,
2365 unsigned nbit_right,
2366 const rs_index_type& blocks_cnt);
2370 bool test_first_block_bit(block_idx_type nb)
const;
2373 blocks_manager_type blockman_;
2381 template<
class Alloc>
2392 template<
class Alloc>
2403 template<
class Alloc>
2414 template<
class Alloc>
2426 template<
typename Alloc>
2429 if (!blockman_.is_init())
2430 blockman_.init_tree();
2435 template<
typename Alloc>
2440 blockman_.move_from(
bvect.blockman_);
2441 size_ =
bvect.size_;
2442 new_blocks_strat_ =
bvect.new_blocks_strat_;
2450 template<
typename Alloc>
2455 if (!blockman_.is_init())
2477 set_range_no_check(left, right);
2479 clear_range_no_check(left, right);
2486 template<
typename Alloc>
2489 if (!blockman_.is_init())
2492 word_t*** blk_root = blockman_.top_blocks_root();
2496 unsigned top_blocks = blockman_.top_block_size();
2497 for (
unsigned i = 0; i < top_blocks; ++i)
2506 blk_blk = blk_root[i];
2517 cnt += blockman_.block_bitcount(blk_blk[j]);
2519 cnt += blockman_.block_bitcount(blk_blk[j+1]);
2521 cnt += blockman_.block_bitcount(blk_blk[j+2]);
2523 cnt += blockman_.block_bitcount(blk_blk[j+3]);
2533 template<
typename Alloc>
2536 word_t*** blk_root = blockman_.top_blocks_root();
2539 typename blocks_manager_type::block_any_func func(blockman_);
2545 template<
typename Alloc>
2548 if (size_ == new_size)
return;
2549 if (size_ < new_size)
2551 if (!blockman_.is_init())
2552 blockman_.init_tree();
2554 blockman_.reserve(new_size);
2566 template<
typename Alloc>
2573 if (found && last >= size_)
2579 template<
typename Alloc>
2594 if (!blockman_.is_init())
2605 blockman_.get_block_coord(nb, i0, j0);
2607 unsigned real_top_blocks = blockman_.find_real_top_blocks();
2608 unsigned max_top_blocks = blockman_.find_max_top_blocks();
2611 rs_idx->set_total(nb + 1);
2612 rs_idx->resize(nb + 1);
2613 rs_idx->resize_effective_super_blocks(real_top_blocks);
2617 BM_ASSERT(max_top_blocks <= blockman_.top_block_size());
2618 bm::word_t*** blk_root = blockman_.top_blocks_root();
2619 for (
unsigned i = 0; i < max_top_blocks; ++i)
2624 rs_idx->set_null_super_block(i);
2629 rs_idx->set_full_super_block(i);
2633 size_type nb_to = nb_from + bm::set_sub_array_size - 1;
2634 bv_blocks->set_range_no_check(nb_from, nb_to);
2645 bcount[j] = sub_count[j] = 0;
2649 unsigned cnt = blockman_.block_bitcount(block);
2651 if (bv_blocks && cnt)
2653 bv_blocks->
set(i * bm::set_sub_array_size + j);
2657 unsigned first, second;
2680 sub_count[j] = first | (second << 16);
2682 }
while (++j < bm::set_sub_array_size);
2684 rs_idx->register_super_block(i, &bcount[0], &sub_count[0]);
2693 template<
typename Alloc>
2697 bm::word_t*** blk_root = blockman_.top_blocks_root();
2700 typename blocks_manager_type::block_count_arr_func func(blockman_, &(arr[0]));
2702 return func.last_block();
2707 template<
typename Alloc>
2711 unsigned nbit_right,
2712 const rs_index_type& rs_idx)
2715 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
2717 unsigned sub_cnt = rs_idx.sub_count(nb);
2718 unsigned first = sub_cnt & 0xFFFF;
2719 unsigned second = sub_cnt >> 16;
2763 unsigned bc_second_range = first + second;
2767 c = bc_second_range;
2775 c = bc_second_range - c;
2781 unsigned bc_second_range = first + second;
2789 c += bc_second_range;
2796 c = rs_idx.count(nb);
2804 size_type cnt = rs_idx.count(nb);
2824 template<
typename Alloc>
2827 const rs_index_type& rs_idx)
const 2830 if (!blockman_.is_init())
2839 if (nblock_right >= rs_idx.get_total())
2841 cnt = rs_idx.count();
2844 cnt = nblock_right ? rs_idx.rcount(nblock_right-1) : 0;
2847 blockman_.get_block_coord(nblock_right, i, j);
2848 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2864 cnt += nbit_right + 1;
2869 this->block_count_to(block, nblock_right, nbit_right, rs_idx);
2878 template<
typename Alloc>
2881 const rs_index_type& blocks_cnt)
const 2884 if (!blockman_.is_init())
2894 blockman_.get_block_coord(nblock_right, i, j);
2895 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2913 cnt = nbit_right + 1;
2918 unsigned w = block[nword];
2922 cnt = block_count_to(block, nblock_right, nbit_right, blocks_cnt);
2929 cnt += nblock_right ? blocks_cnt.rcount(nblock_right - 1) : 0;
2935 template<
typename Alloc>
2945 if (!blockman_.is_init())
2955 blockman_.get_block_coord(nblock_left, i0, j0);
2956 const bm::word_t* block = blockman_.get_block(i0, j0);
2961 unsigned nbit_right = unsigned(right & bm::set_block_mask);
2966 typename blocks_manager_type::block_count_func func(blockman_);
2989 cnt += func.count();
2990 if (nblock_left == nblock_right)
2997 word_t*** blk_root = blockman_.top_blocks_root();
2998 unsigned top_blocks_size = blockman_.top_block_size();
3001 cnt += func.count();
3004 blockman_.get_block_coord(nblock_right, i0, j0);
3005 block = blockman_.get_block(i0, j0);
3027 template<
typename Alloc>
3031 const rs_index_type& rs_idx)
const 3039 return this->
test(left);
3041 size_type cnt_l, cnt_r;
3043 cnt_l = this->
count_to(left-1, rs_idx);
3047 cnt_r = this->
count_to(right, rs_idx);
3049 return cnt_r - cnt_l;
3054 template<
typename Alloc>
3058 bm::word_t*** blk_root = blockman_.top_blocks_root();
3059 for (
unsigned i = 0; i < top_blocks; ++i)
3080 blockman_.set_block_ptr(i, j, 0);
3101 template<
typename Alloc>
3110 blockman_.get_block_coord(nb, i, j);
3111 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3126 unsigned w = block[nword];
3135 template<
typename Alloc>
3140 if (!blockman_.is_init())
3147 temp_block = blockman_.check_allocate_tempblock();
3152 ::memcpy(stat->gap_levels,
3154 stat->max_serialize_mem = (unsigned)
sizeof(
bm::id_t) * 4;
3157 blockman_.optimize_tree(temp_block, opt_mode, stat);
3161 size_t safe_inc = stat->max_serialize_mem / 10;
3162 if (!safe_inc) safe_inc = 256;
3163 stat->max_serialize_mem += safe_inc;
3165 stat->memory_used += (unsigned)(
sizeof(*
this) -
sizeof(blockman_));
3167 unsigned top_size = blockman_.top_block_size();
3168 size_t blocks_mem =
sizeof(blockman_);
3169 blocks_mem +=
sizeof(
bm::word_t**) * top_size;
3171 stat->memory_used += blocks_mem;
3176 blockman_.free_temp_block();
3181 template<
typename Alloc>
3185 if (!blockman_.is_init())
3188 struct bvector<Alloc>::statistics st;
3195 ::memcpy(opt_glen, st.gap_levels, bm::gap_levels * sizeof(*opt_glen));
3198 st.gap_length + st.gap_blocks,
3207 template<typename Alloc>
3210 if (blockman_.is_init())
3212 word_t*** blk_root = blockman_.top_blocks_root();
3214 blocks_manager_type::gap_level_func gl_func(blockman_, glevel_len);
3218 blockman_.set_glen(glevel_len);
3223 template<
typename Alloc>
3227 unsigned top_blocks = blockman_.top_block_size();
3228 unsigned bvect_top_blocks = bv.blockman_.top_block_size();
3230 if (bvect_top_blocks > top_blocks) top_blocks = bvect_top_blocks;
3232 for (
unsigned i = 0; i < top_blocks; ++i)
3234 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3235 const bm::word_t*
const* arg_blk_blk = bv.blockman_.get_topblock(i);
3237 if (blk_blk == arg_blk_blk)
3247 arg_blk = arg_blk_blk ? arg_blk_blk[j] : 0;
3255 blk = blk_blk ? blk_blk[j] : 0;
3259 if (blk == arg_blk)
continue;
3264 if (!blk || !arg_blk)
3341 template<
typename Alloc>
3346 blockman_.swap(
bvect.blockman_);
3353 template<
typename Alloc>
3363 unsigned top_size = blockman_.top_block_size();
3365 size_t blocks_mem =
sizeof(blockman_);
3368 blocks_mem +=
sizeof(
bm::word_t**) * top_size;
3369 bm::word_t*** blk_root = blockman_.top_blocks_root();
3373 for (
unsigned i = 0; i < top_size; ++i)
3375 const bm::word_t*
const* blk_blk = blk_root[i];
3382 blk_blk = blk_root[i];
3405 size_t full_null_size = blockman_.calc_serialization_null_full();
3411 if (!safe_inc) safe_inc = 256;
3415 st->
memory_used += unsigned(
sizeof(*
this) -
sizeof(blockman_));
3424 template<
class Alloc>
3438 blockman_.check_allocate_block(nblock,
3448 gap_block_set(gap_blk, val, nblock, nbit);
3454 blk[nword] |= (1u << nbit);
3460 template<
class Alloc>
3464 if (!ids || !ids_size)
3466 if (!blockman_.is_init())
3467 blockman_.init_tree();
3469 import(ids, ids_size, so);
3475 template<
class Alloc>
3479 if (!ids || !ids_size || !blockman_.is_init())
3485 bv_tmp.
import(ids, ids_size, so);
3503 template<
class Alloc>
3507 if (!ids || !ids_size || !blockman_.is_init())
3512 bv_tmp.
import(ids, ids_size, so);
3529 template<
class Alloc>
3538 template<
class Alloc>
3547 template<
class Alloc>
3550 if (val == condition)
return false;
3556 return set_bit_conditional_impl(n, val, condition);
3561 template<
class Alloc>
3567 if (!blockman_.is_init())
3568 blockman_.init_tree();
3569 return and_bit_no_check(n, val);
3574 template<
class Alloc>
3579 if (!blockman_.is_init())
3580 blockman_.init_tree();
3591 template<
class Alloc>
3595 size_type n, start, stop = size_in;
3596 block_idx_type nblock;
3607 if (nblock == nblock_end)
3629 }
while (start < size_in);
3634 template<
class Alloc>
3636 block_idx_type nblock,
3637 size_type start, size_type stop)
3641 blockman_.check_allocate_block(nblock, 1, 0, &block_type,
true);
3645 blk = blockman_.deoptimize_block(nblock);
3660 template<
class Alloc>
3668 blockman_.check_allocate_block(nblock,
3682 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3695 if ( ((*word) & mask) == 0 )
3715 template<
class Alloc>
3717 bool val, block_idx_type nblock,
unsigned nbit)
3719 unsigned is_set, new_block_len;
3724 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
3725 if (new_block_len > threshold)
3727 blockman_.extend_gap_block(nblock, gap_blk);
3735 template<
class Alloc>
3741 blockman_.check_allocate_block(nblock,
3752 this->gap_block_set(gap_blk, !is_set, nblock, nbit);
3761 is_set = ((*word) & mask);
3763 *word = (is_set) ? (*word & ~mask) : (*word | mask);
3770 template<
class Alloc>
3779 blockman_.check_allocate_block(nblock,
3789 if (block_type == 1)
3794 if (old_val != condition)
3801 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3813 bool is_set = ((*word) & mask) != 0;
3815 if (is_set != condition)
3839 template<
class Alloc>
3847 blockman_.check_allocate_block(nblock,
3857 if (block_type == 1)
3862 bool new_val = val & old_val;
3863 if (new_val != old_val)
3865 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3877 bool is_set = ((*word) & mask) != 0;
3879 bool new_val = is_set & val;
3898 template<
class Alloc>
3907 pos = check_or_next(from);
3913 template<
class Alloc>
3918 unsigned top_blocks = blockman_.top_block_size();
3921 for (
unsigned i = top_blocks-1;
true; --i)
3923 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3950 pos = base_idx + block_pos;
3968 template<
class Alloc>
3973 unsigned top_blocks = blockman_.top_block_size();
3974 for (
unsigned i = 0; i < top_blocks; ++i)
3976 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3990 found =
true; block_pos = 0;
4002 pos = base_idx + block_pos;
4014 template<
class Alloc>
4017 bool found =
find(in_first);
4026 in_first = in_last = 0;
4033 template<
class Alloc>
4036 size_type& pos)
const 4042 if (!rank_in || !blockman_.is_init())
4047 unsigned bit_pos = 0;
4052 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4057 unsigned cnt = blockman_.block_bitcount(block);
4086 template<
class Alloc>
4090 const rs_index_type& rs_idx)
const 4097 !blockman_.is_init() ||
4098 (rs_idx.count() < rank_in))
4106 nb = rs_idx.find(rank_in);
4107 BM_ASSERT(rs_idx.rcount(nb) >= rank_in);
4109 rank_in -= rs_idx.rcount(nb-1);
4113 unsigned bit_pos = 0;
4115 for (; nb < rs_idx.get_total(); ++nb)
4118 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4123 unsigned block_bc = rs_idx.count(nb);
4124 if (rank_in <= block_bc)
4126 nbit = rs_idx.select_sub_range(nb, rank_in);
4132 rank_in -= block_bc;
4157 template<
class Alloc>
4159 const rs_index_type& rs_idx)
const 4164 !blockman_.is_init() ||
4165 (rs_idx.count() < rank_in))
4171 bool found = rs_idx.find(&rank_in, &nb, &sub_range_from);
4176 blockman_.get_block_coord(nb, i, j);
4177 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4184 unsigned bit_pos = 0;
4193 template<
class Alloc>
4197 if (!blockman_.is_init())
4203 blockman_.get_block_coord(nb, i, j);
4204 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4233 block_idx_type top_blocks = blockman_.top_block_size();
4234 for (; i < top_blocks; ++i)
4236 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
4251 found =
true; block_pos = 0;
4263 prev = base_idx + block_pos;
4277 template<
class Alloc>
4281 if (!blockman_.is_init())
4284 size_type pos = this->check_or_next(prev);
4292 template<
class Alloc>
4300 template<
class Alloc>
4303 bool b = this->
test(0);
4310 template<
class Alloc>
4317 if (!blockman_.is_init())
4335 blockman_.get_block_coord(nb, i, j);
4336 bm::word_t* block = blockman_.get_block_ptr(i, j);
4338 if (!block && !value)
4343 block = blockman_.check_allocate_block(nb,
BM_BIT);
4345 block = blockman_.deoptimize_block(nb);
4356 blockman_.get_block_coord(nb, i0, j0);
4358 unsigned top_blocks = blockman_.top_block_size();
4359 bm::word_t*** blk_root = blockman_.top_blocks_root();
4365 if (i >= top_blocks)
4372 blk_blk = blk_root[i];
4383 blockman_.check_allocate_block(nblock, 0, 0, &block_type,
false);
4384 block[0] |= carry_over;
4387 blk_root = blockman_.top_blocks_root();
4388 blk_blk = blk_root[i];
4389 top_blocks = blockman_.top_block_size();
4400 blk_blk = blockman_.check_alloc_top_subblock(i);
4414 carry_over = 0; block = 0;
4419 if (0 != (block = blk_blk[j]))
4434 block = blockman_.deoptimize_block(nblock);
4435 block[0] <<= (carry_over = 1);
4444 block = blockman_.deoptimize_block(nblock);
4448 unsigned new_block_len;
4452 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4453 if (new_block_len > threshold)
4455 blockman_.extend_gap_block(nblock, gap_blk);
4471 blockman_.zero_block(nblock);
4475 blockman_.zero_block(nblock);
4487 template<
class Alloc>
4492 if (!blockman_.is_init())
4503 blockman_.get_block_coord(nb, i, j);
4504 bm::word_t* block = blockman_.get_block_ptr(i, j);
4505 bool carry_over = test_first_block_bit(nb+1);
4510 block = blockman_.check_allocate_block(nb,
BM_BIT);
4517 block = blockman_.deoptimize_block(nb);
4527 blockman_.get_block_coord(nb, i0, j0);
4529 unsigned top_blocks = blockman_.top_block_size();
4530 bm::word_t*** blk_root = blockman_.top_blocks_root();
4536 if (i >= top_blocks)
4539 blk_blk = blk_root[i];
4551 bool carry_over = 0;
4552 if (i + 1 < bm::set_top_array_size)
4555 carry_over = this->
test(co_idx);
4559 blk_blk = blockman_.check_alloc_top_subblock(i);
4563 blk_blk = blockman_.check_alloc_top_subblock(i);
4571 bool carry_over = 0;
4576 bool no_blocks = (j == 0);
4579 if (0 != (block = blk_blk[j]))
4587 if (j == bm::set_sub_array_size && no_blocks)
4589 blockman_.zero_block(i, j-1);
4597 carry_over = test_first_block_bit(nblock+1);
4602 block = blockman_.deoptimize_block(nblock);
4610 carry_over = test_first_block_bit(nblock+1);
4611 unsigned new_block_len;
4615 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4616 if (new_block_len > threshold)
4617 blockman_.extend_gap_block(nblock, gap_blk);
4621 blockman_.zero_block(i, j);
4629 blockman_.zero_block(i, j);
4632 if (carry_over && nblock)
4645 template<
class Alloc>
4656 template<
class Alloc>
4659 if (!bv.blockman_.is_init())
4665 unsigned top_blocks = blockman_.top_block_size();
4666 if (size_ < bv.size_)
4670 unsigned arg_top_blocks = bv.blockman_.top_block_size();
4671 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
4674 bm::word_t*** blk_root = blockman_.top_blocks_root();
4675 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
4677 for (
unsigned i = 0; i < top_blocks; ++i)
4680 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
4681 if (blk_blk == blk_blk_arg || !blk_blk_arg)
4683 if (!blk_blk && blk_blk_arg)
4687 blk_root[i] = blk_root_arg[i];
4688 blk_root_arg[i] = 0;
4695 blockman_.deallocate_top_subblock(i);
4705 blk = blk_blk[j]; arg_blk = blk_blk_arg[j];
4708 if (!blk && arg_blk)
4710 blockman_.set_block_ptr(i, j, arg_blk);
4711 bv.blockman_.set_block_ptr(i, j, 0);
4715 combine_operation_block_or(i, j, blk, arg_blk);
4724 template<
class Alloc>
4731 blockman_.get_block_coord(nb, i0, j0);
4732 bm::word_t* blk = blockman_.get_block_ptr(i0, j0);
4740 template<
class Alloc>
4746 if (blockman_.is_init())
4747 blockman_.deinit_tree();
4768 unsigned top_blocks1 = bman1.top_block_size();
4769 unsigned top_blocks2 = bman2.top_block_size();
4770 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4771 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4774 if (size_ < bv2.size_)
4777 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4778 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
4780 for (
unsigned i = 0; i < top_blocks; ++i)
4782 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4783 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4785 if (blk_blk_arg1 == blk_blk_arg2)
4788 bm::word_t*** blk_root = blockman_.top_blocks_root();
4789 blk_root[i] = blk_blk_arg1;
4795 bm::word_t*** blk_root = blockman_.top_blocks_root();
4799 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
4800 bool any_blocks =
false;
4804 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4805 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4806 if (arg_blk1 == arg_blk2 && !arg_blk1)
4808 bool need_opt = combine_operation_block_or(i, j, arg_blk1, arg_blk2);
4810 blockman_.optimize_bit_block(i, j);
4811 any_blocks |= bool(blk_blk[j]);
4815 blockman_.free_top_subblock(i);
4820 blockman_.free_temp_block();
4827 template<
class Alloc>
4833 if (blockman_.is_init())
4834 blockman_.deinit_tree();
4851 if (!bman1.is_init())
4857 if (!bman2.is_init())
4863 unsigned top_blocks1 = bman1.top_block_size();
4864 unsigned top_blocks2 = bman2.top_block_size();
4865 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4866 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4869 if (size_ < bv2.size_)
4872 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4873 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
4875 for (
unsigned i = 0; i < top_blocks; ++i)
4877 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4878 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4880 if (blk_blk_arg1 == blk_blk_arg2)
4885 blockman_.deallocate_top_subblock(i);
4893 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
4894 bool any_blocks =
false;
4899 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4900 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4902 if ((arg_blk1 == arg_blk2) &&
4906 bool need_opt = combine_operation_block_xor(i, j, arg_blk1, arg_blk2);
4908 blockman_.optimize_bit_block(i, j);
4909 any_blocks |= bool(blk_blk[j]);
4913 blockman_.free_top_subblock(i);
4918 blockman_.free_temp_block();
4925 template<
class Alloc>
4946 if (blockman_.is_init())
4947 blockman_.deinit_tree();
4951 if (!bman1.is_init() || !bman2.is_init())
4954 unsigned top_blocks1 = bman1.top_block_size();
4955 unsigned top_blocks2 = bman2.top_block_size();
4956 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4957 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4960 if (size_ < bv2.size_)
4963 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4964 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
4966 for (
unsigned i = 0; i < top_blocks; ++i)
4968 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4969 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4971 if (blk_blk_arg1 == blk_blk_arg2)
4977 bm::word_t*** blk_root = blockman_.top_blocks_root();
4987 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
4988 bool any_blocks =
false;
4993 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4994 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4996 if ((arg_blk1 == arg_blk2) && !arg_blk1)
4999 bool need_opt = combine_operation_block_and(i, j, arg_blk1, arg_blk2);
5001 blockman_.optimize_bit_block(i, j);
5002 any_blocks |= bool(blk_blk[j]);
5006 blockman_.free_top_subblock(i);
5011 blockman_.free_temp_block();
5019 template<
class Alloc>
5025 if (blockman_.is_init())
5026 blockman_.deinit_tree();
5044 if (!bman1.is_init())
5048 if (!bman2.is_init())
5054 unsigned top_blocks1 = bman1.top_block_size();
5055 unsigned top_blocks2 = bman2.top_block_size();
5056 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5057 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5060 if (size_ < bv2.size_)
5063 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5064 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5066 for (
unsigned i = 0; i < top_blocks; ++i)
5068 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5069 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5071 if (blk_blk_arg1 == blk_blk_arg2)
5078 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5079 bool any_blocks =
false;
5083 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5084 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5085 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5088 bool need_opt = combine_operation_block_sub(i, j, arg_blk1, arg_blk2);
5090 blockman_.optimize_bit_block(i, j);
5091 any_blocks |= bool(blk_blk[j]);
5095 blockman_.free_top_subblock(i);
5100 blockman_.free_temp_block();
5108 #define BM_OR_OP(x) \ 5110 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \ 5111 if (blk != arg_blk) \ 5112 combine_operation_block_or(i, j+x, blk, arg_blk); \ 5115 template<
class Alloc>
5118 if (!bv.blockman_.is_init())
5121 unsigned top_blocks = blockman_.top_block_size();
5122 if (size_ < bv.size_)
5125 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5126 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5128 bm::word_t*** blk_root = blockman_.top_blocks_root();
5129 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5131 for (
unsigned i = 0; i < top_blocks; ++i)
5134 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5135 if (blk_blk == blk_blk_arg || !blk_blk_arg)
5141 blockman_.deallocate_top_subblock(i);
5146 blk_blk = blockman_.alloc_top_subblock(i);
5153 #if defined(BM64_AVX2) || defined(BM64_AVX512) 5154 if (!avx2_test_all_eq_wave2(blk_blk + j, blk_blk_arg + j))
5162 #elif defined(BM64_SSE4) 5181 #define BM_XOR_OP(x) \ 5183 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \ 5184 combine_operation_block_xor(i, j+x, blk, arg_blk); \ 5187 template<
class Alloc>
5190 if (!bv.blockman_.is_init())
5192 if (!blockman_.is_init())
5198 unsigned top_blocks = blockman_.top_block_size();
5199 if (size_ < bv.size_)
5203 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5204 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5206 bm::word_t*** blk_root = blockman_.top_blocks_root();
5207 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5209 for (
unsigned i = 0; i < top_blocks; ++i)
5211 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5215 if (blk_blk == blk_blk_arg)
5225 blk_blk = blockman_.check_alloc_top_subblock(i);
5238 blk_blk = blockman_.alloc_top_subblock(i);
5245 #if defined(BM64_AVX2) || defined(BM64_AVX512) 5246 if (!avx2_test_all_zero_wave2(blk_blk + j, blk_blk_arg + j))
5254 #elif defined(BM64_SSE4) 5274 #define BM_AND_OP(x) if (0 != (blk = blk_blk[j+x])) \ 5276 if (0 != (arg_blk = blk_blk_arg[j+x])) \ 5277 combine_operation_block_and(i, j+x, blk, arg_blk); \ 5279 blockman_.zero_block(i, j+x); \ 5282 template<
class Alloc>
5285 if (!blockman_.is_init())
5287 if (!bv.blockman_.is_init())
5293 unsigned top_blocks = blockman_.top_block_size();
5294 if (size_ < bv.size_)
5297 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5298 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5301 bm::word_t*** blk_root = blockman_.top_blocks_root();
5302 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5304 for (
unsigned i = 0; i < top_blocks; ++i)
5309 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5319 blockman_.zero_block(i, j);
5320 blockman_.deallocate_top_subblock(i);
5328 blk_blk = blockman_.check_alloc_top_subblock(i);
5335 #if defined(BM64_AVX2) || defined(BM64_AVX512) 5336 if (!avx2_test_all_zero_wave(blk_blk + j))
5344 #elif defined(BM64_SSE4) 5364 #define BM_SUB_OP(x) \ 5365 if ((0 != (blk = blk_blk[j+x])) && (0 != (arg_blk = blk_blk_arg[j+x]))) \ 5366 combine_operation_block_sub(i, j+x, blk, arg_blk); 5369 template<
class Alloc>
5372 if (!blockman_.is_init() || !bv.blockman_.is_init())
5375 unsigned top_blocks = blockman_.top_block_size();
5376 if (size_ < bv.size_)
5379 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5380 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5382 bm::word_t*** blk_root = blockman_.top_blocks_root();
5383 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5385 for (
unsigned i = 0; i < top_blocks; ++i)
5388 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5389 if (!blk_blk || !blk_blk_arg)
5394 blockman_.deallocate_top_subblock(i);
5398 blk_blk = blockman_.check_alloc_top_subblock(i);
5405 #if defined(BM64_AVX2) || defined(BM64_AVX512) 5406 if (!avx2_test_all_zero_wave(blk_blk + j))
5414 #elif defined(BM64_SSE4) 5433 template<
class Alloc>
5438 if (!blockman_.is_init())
5442 blockman_.init_tree();
5445 unsigned top_blocks = blockman_.top_block_size();
5446 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5448 if (arg_top_blocks > top_blocks)
5449 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5451 if (size_ < bv.size_)
5455 blockman_.reserve_top_blocks(arg_top_blocks);
5456 top_blocks = blockman_.top_block_size();
5459 if (size_ > bv.size_)
5464 if (arg_top_blocks < top_blocks)
5467 top_blocks = arg_top_blocks;
5472 bm::word_t*** blk_root = blockman_.top_blocks_root();
5473 unsigned block_idx = 0;
5477 top_blocks = blockman_.top_block_size();
5478 if (top_blocks < bv.blockman_.top_block_size())
5482 top_blocks = bv.blockman_.top_block_size();
5486 for (i = 0; i < top_blocks; ++i)
5496 const bm::word_t*
const* bvbb = bv.blockman_.get_topblock(i);
5506 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5524 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5531 blockman_.zero_block(i, j);
5542 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5555 template<
class Alloc>
5564 blockman_.clone_assign_block(i, j, arg_blk2);
5569 blockman_.clone_assign_block(i, j, arg_blk1);
5581 if (is_gap1 | is_gap2)
5583 if (is_gap1 & is_gap2)
5589 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5597 arg_block = arg_blk2;
5602 arg_block = arg_blk1;
5605 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5613 bm::word_t* block = blockman_.borrow_tempblock();
5614 blockman_.set_block_ptr(i, j, block);
5620 blockman_.return_tempblock(block);
5628 template<
class Alloc>
5638 blockman_.clone_assign_block(i, j, arg_blk2);
5643 blockman_.clone_assign_block(i, j, arg_blk1);
5649 blockman_.clone_assign_block(i, j, arg_blk2,
true);
5655 blockman_.clone_assign_block(i, j, arg_blk1,
true);
5662 if (is_gap1 | is_gap2)
5664 if (is_gap1 & is_gap2)
5670 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5678 arg_block = arg_blk2;
5683 arg_block = arg_blk1;
5686 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5694 bm::word_t* block = blockman_.borrow_tempblock();
5695 blockman_.set_block_ptr(i, j, block);
5700 blockman_.set_block_ptr(i, j, 0);
5701 blockman_.return_tempblock(block);
5710 template<
class Alloc>
5718 if (!arg_blk1 || !arg_blk2)
5729 blockman_.clone_assign_block(i, j, arg_blk2);
5734 blockman_.clone_assign_block(i, j, arg_blk1);
5741 if (is_gap1 | is_gap2)
5743 if (is_gap1 & is_gap2)
5749 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5757 arg_block = arg_blk2;
5762 arg_block = arg_blk1;
5765 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5773 bm::word_t* block = blockman_.borrow_tempblock();
5774 blockman_.set_block_ptr(i, j, block);
5779 blockman_.set_block_ptr(i, j, 0);
5780 blockman_.return_tempblock(block);
5789 template<
class Alloc>
5801 blockman_.clone_assign_block(i, j, arg_blk1);
5812 if (is_gap1 | is_gap2)
5814 if (is_gap1 & is_gap2)
5820 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5826 bm::word_t* block = blockman_.borrow_tempblock();
5827 blockman_.set_block_ptr(i, j, block);
5832 blockman_.set_block_ptr(i, j, 0);
5833 blockman_.return_tempblock(block);
5839 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_blk1);
5847 bm::word_t* block = blockman_.borrow_tempblock();
5848 blockman_.set_block_ptr(i, j, block);
5853 blockman_.set_block_ptr(i, j, 0);
5854 blockman_.return_tempblock(block);
5863 template<
class Alloc>
5865 unsigned i,
unsigned j,
5875 blockman_.zero_block(i, j);
5889 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
5894 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
5898 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
5899 blockman_.set_block_ptr(i, j, new_blk);
5911 blk = blockman_.clone_gap_block(arg_gap, gap);
5912 blockman_.set_block(i, j, blk, gap);
5924 blk = blockman_.alloc_.alloc_bit_block();
5926 blockman_.set_block_ptr(i, j, blk);
5934 blockman_.get_allocator().free_bit_block(blk);
5942 template<
class Alloc>
5944 unsigned i,
unsigned j,
5960 blockman_.set_block_ptr(i, j, 0);
5976 blk = blockman_.get_allocator().alloc_bit_block();
5978 blockman_.set_block_ptr(i, j, blk);
5994 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
5999 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6003 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6004 blockman_.set_block_ptr(i, j, new_blk);
6016 blk = blockman_.clone_gap_block(arg_gap, gap);
6017 blockman_.set_block(i, j, blk, gap);
6028 blk = blockman_.alloc_.alloc_bit_block();
6030 blockman_.set_block_ptr(i, j, blk);
6037 blockman_.get_allocator().free_bit_block(blk);
6038 blockman_.set_block_ptr(i, j, 0);
6046 template<
class Alloc>
6069 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6074 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6084 blockman_.get_allocator().free_bit_block(new_blk);
6091 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6092 blockman_.set_block_ptr(i, j, new_blk);
6103 blockman_.zero_block(i, j);
6111 bm::word_t* new_blk = blockman_.clone_gap_block(arg_gap, is_new_gap);
6115 blockman_.set_block_ptr(i, j, new_blk);
6123 blockman_.zero_block(i, j);
6133 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6135 blockman_.set_block_ptr(i, j, new_blk);
6142 blockman_.get_allocator().free_bit_block(blk);
6143 blockman_.set_block_ptr(i, j, 0);
6149 template<
class Alloc>
6157 blockman_.zero_block(i, j);
6176 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6178 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6182 blk = blockman_.convert_gap2bitset(i, j, gap_blk);
6194 blockman_.zero_block(i, j);
6209 if (!dst || !arg_blk)
6213 if (ret && ret == arg_blk)
6215 ret = blockman_.get_allocator().alloc_bit_block();
6221 blockman_.set_block_ptr(i, j, ret);
6223 blockman_.get_allocator().free_bit_block(dst);
6229 template<
class Alloc>
6244 if (!blk && arg_gap)
6246 blk = blockman_.clone_gap_block(
BMGAP_PTR(arg_blk), gap);
6247 blockman_.set_block(nb, blk, gap);
6266 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6271 blockman_.zero_block(nb);
6273 blockman_.assign_gap(nb, res, ++res_len, blk, tmp_buf);
6286 blockman_.zero_block(nb);
6295 blk = blockman_.convert_gap2bitset(nb, gap_blk);
6311 if (opcode !=
BM_OR)
6315 blockman_.zero_block(nb);
6334 if (dst == 0 && arg_blk == 0)
6346 ret = blockman_.get_allocator().alloc_bit_block();
6368 }
while (wrd_ptr < wrd_end);
6378 ret = blockman_.get_allocator().alloc_bit_block();
6385 if (ret && ret == arg_blk)
6387 ret = blockman_.get_allocator().alloc_bit_block();
6398 blockman_.set_block(nb, ret);
6401 blockman_.get_allocator().free_bit_block(dst);
6408 template<
class Alloc>
6428 unsigned nbit_left = unsigned(left & bm::set_block_mask);
6435 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6439 blockman_.get_block_coord(nblock_left, i, j);
6440 block = blockman_.get_block_ptr(i, j);
6447 if (nblock_left == nblock_right)
6459 blockman_.set_all_set(nb, nb_to-1);
6462 if (nb_to > nblock_right)
6465 blockman_.get_block_coord(nblock_right, i, j);
6466 block = blockman_.get_block_ptr(i, j);
6468 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6482 template<
class Alloc>
6503 unsigned nbit_left = unsigned(left & bm::set_block_mask);
6510 bm::gap_init_range_block<gap_word_t>(tmp_gap_blk,
6514 blockman_.get_block_coord(nblock_left, i, j);
6515 block = blockman_.get_block_ptr(i, j);
6523 if (nblock_left == nblock_right)
6525 nb = nblock_left + 1;
6535 blockman_.set_all_zero(nb, nb_to - 1u);
6538 if (nb_to > nblock_right)
6541 blockman_.get_block_coord(nblock_right, i, j);
6542 block = blockman_.get_block_ptr(i, j);
6543 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6559 template<
class Alloc>
6564 if (!bvect.blockman_.is_init())
6570 if (blockman_.is_init())
6572 blockman_.deinit_tree();
6577 copy_range_no_check(bvect, left, right);
6582 template<
class Alloc>
6594 blockman_.copy(bvect.blockman_, nblock_left, nblock_right);
6601 clear_range_no_check(from, left-1);
6607 if (found && (last > right))
6608 clear_range_no_check(right+1, last);
6614 template<
class Alloc>
6618 throw std::bad_alloc();
6620 BM_THROW(BM_ERR_BADALLOC);
6633 #pragma warning( pop ) Bitvector Bit-vector container with runtime compression of bits.
unsigned gap_capacity(const T *buf, const T *glevel_len)
Returns GAP block capacity.
bool operator==(const bvector< Alloc > &bv) const
bvector_type * get_bvector() const
bulk_insert_iterator & operator*()
Output iterator iterator designed to set "ON" bits based on input sequence of integers.
const blocks_manager_type & get_blocks_manager() const
get access to memory manager (internal) Use only if you are BitMagic library
void bit_block_copy(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Bitblock copy operation.
bool find(size_type &pos) const
Finds index of first 1 bit.
bvector< Alloc > & reset()
Clears every bit in the bitvector.
void add_bit_block()
count bit block
void gap_add_to_bitset(unsigned *dest, const T *pcurr, unsigned len)
Adds(OR) GAP block to bitblock.
void combine_operation_and(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation AND
unsigned gap_find_last(const T *buf, unsigned *last)
GAP block find the last set bit.
void bit_invert(T *start)
bm::word_t * bit_operation_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Block OR operation. Makes analysis if block is 0 or FULL.
BMFORCEINLINE gap_word_t * gap_operation_xor(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize)
GAP XOR operation.
void bit_andnot_arr_ffmask(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
bitblock AND NOT with constant ~0 mask operation.
bool set_bit_and(size_type n, bool val=true)
Sets bit n using bit AND with the provided value.
void clear_bit_no_check(size_type n)
Clears bit n without precondition checks.
enumerator & go_up()
Advance enumerator to the next available bit.
size_type rank(size_type n, const rs_index_type &rs_idx) const
Returns rank of specified bit position.
bool operator!=(const bvector< Alloc > &bv) const
void import_block(const size_type *ids, block_idx_type nblock, size_type start, size_type stop)
allocator_pool_type * get_allocator_pool()
Get curent allocator pool (if set)
counted_enumerator & operator++()
BMFORCEINLINE bool sse42_test_all_zero_wave2(const void *ptr0, const void *ptr1)
check if 2 waves of pointers are all NULL
#define VECT_XOR_ARR_2_MASK(dst, src, src_end, mask)
std::output_iterator_tag iterator_category
bm::id64_t bit_block_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Plain bitblock XOR operation. Function does not analyse availability of source and destination blocks...
const unsigned set_block_size
void swap(bvector< Alloc > &bvect) BMNOEXEPT
Exchanges content of bv and this bvector.
bool find_range(size_type &first, size_type &last) const
Finds dynamic range of bit-vector [first, last].
size_type count_range(size_type left, size_type right, const rs_index_type &rs_idx) const
Returns count of 1 bits in the given range [left..right] Uses rank-select index to accelerate the sea...
std::output_iterator_tag iterator_category
bool operator>=(const bvector< Alloc > &bv) const
unsigned char bits[set_bitscan_wave_size *32]
bit list
const unsigned set_word_shift
void combine_operation_or(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation OR
rs_index< allocator_type > blocks_count
size_type pos
Last bit position decoded before.
enumerator & go_to(size_type pos)
go to a specific position in the bit-vector (or next)
const unsigned set_sub_array_size
void gap_xor_to_bitset(unsigned *dest, const T *pcurr)
XOR GAP block to bitblock.
void combine_operation_xor(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation XOR
bvector< Alloc > & set()
Sets every bit in this bitset to 1.
rs_index< allocator_type > rs_index_type
reference operator[](size_type n)
bool for_each_nzblock_if(T ***root, BI size1, F &f)
#define FULL_SUB_BLOCK_REAL_ADDR
insert_iterator(bvector< Alloc > &bvect)
reference(bvector< Alloc > &bv, size_type position)
bool find_not_null_ptr(bm::word_t ***arr, N start, N size, N *pos)
unsigned short cnt
Number of ON bits.
void optimize_gap_size()
Optimize sizes of GAP blocks.
Constants, tables and typedefs.
bulk_insert_iterator & operator++()
const unsigned set_array_shift
bool compare_state(const iterator_base &ib) const
Compare FSMs for testing purposes.
#define IS_VALID_ADDR(addr)
Information about current bitblock.
void import(const size_type *ids, size_type ids_size, bm::sort_order sorted_idx)
Import integers (set bits).
size_type capacity() const
Returns bvector's capacity (number of bits it can store)
enumerator(const bvector< Alloc > *bv)
Construct enumerator associated with a vector. This construction creates unpositioned iterator with s...
size_type size() const
return current size of the vector (bits)
unsigned gap_set_value(unsigned val, T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set)
Sets or clears bit in the GAP buffer.
const unsigned set_top_array_size
bulk_insert_iterator(bulk_insert_iterator &&iit) BMNOEXEPT
unsigned long long int id64_t
gap_word_t gap_len
Current dgap length.
void assign_if_not_set(allocator_pool_type &pool, bvector< Alloc > &bv)
enumerator operator++(int)
Advance enumerator forward to the next available bit. Possibly do NOT use this operator it is slower ...
gap_word_t * gap_operation_sub(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize)
GAP SUB (AND NOT) operation.
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv)
2 operand logical XOR
void go_first()
Position enumerator to the first available bit.
void optimize(bm::word_t *temp_block=0, optmode opt_mode=opt_compress, statistics *stat=0)
Optimize memory bitvector's memory allocation.
void invalidate()
Turns iterator into an invalid state.
const unsigned short set_bitscan_wave_size
void set_block_bits_u64(bm::word_t *BMRESTRICT block, const bm::id64_t *BMRESTRICT idx, bm::id64_t start, bm::id64_t stop)
set bits in a bit-block using global index
BMFORCEINLINE bool sse42_test_all_eq_wave2(const void *ptr0, const void *ptr1)
check if wave of 2 pointers are the same (null or FULL)
block_idx_type count_blocks(unsigned *arr) const
Computes bitcount values for all bvector blocks.
void for_each_nzblock_range(T ***root, N top_size, N nb_from, N nb_to, F &f)
bulk_insert_iterator & operator=(bulk_insert_iterator &&ii) BMNOEXEPT
insert_iterator inserter()
const unsigned gap_equiv_len
counted_enumerator(const enumerator &en)
insert_iterator(const insert_iterator &iit)
bvector(bvector< Alloc > &&bvect) BMNOEXEPT
Move constructor.
bm::bvector< Alloc > bvector_type
unsigned bit_block_find(const bm::word_t *block, unsigned nbit, unsigned *pos)
Searches for the next 1 bit in the BIT block.
size_t max_serialize_mem
estimated maximum memory for serialization
void gap_convert_to_bitset(unsigned *dest, const T *buf)
GAP block to bitblock conversion.
pre-processor un-defines to avoid global space pollution (internal)
size_type get_next(size_type prev) const
Finds the number of the next bit ON.
const bm::word_t * ptr
Word pointer.
bool operator==(const iterator_base &it) const
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand XOR : this := bv1 XOR bv2
const bm::word_t * block_
Block pointer.(NULL-invalid)
bm::word_t * bit_operation_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
bitblock SUB operation.
insert_iterator & operator++(int)
bool inc(size_type n)
Increment the specified element.
int(* bit_visitor_callback_type)(void *handle_ptr, bm::id_t bit_idx)
Callback type to visit (callback style) bits in bit-vector(s)
const reference & operator|=(bool value) const
bm::bvector< Alloc > bvector_type
size_type position_
Bit position (bit idx)
BMFORCEINLINE bm::gap_word_t gap_length(const bm::gap_word_t *buf)
Returns GAP block length.
SIMD target version definitions.
void set_new_blocks_strat(strategy strat)
Sets new blocks allocation strategy.
bitblock_descr bit_
BitBlock related info.
bool bit_block_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Plain bitblock OR operation. Function does not analyse availability of source and destination blocks...
bvector(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy constructor for range copy [left..right].
reference(const reference &ref)
void xor_swap(W &x, W &y)
XOR swap two scalar variables.
bool select(size_type rank, size_type &pos, const rs_index_type &rs_idx) const
select bit-vector position for the specified rank(bitcount)
compress blocks when possible (GAP/prefix sum)
bool bit_block_shift_l1_unr(bm::word_t *block, bm::word_t *empty_acc, bm::word_t co_flag)
Left bit-shift of bit-block by 1 bit (loop unrolled)
void clear(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
clear list of bits in this bitset
bm::word_t bit_block_insert(bm::word_t *block, unsigned bitpos, bool value)
insert bit into position and shift the rest right with carryover
unsigned short idx
Current position in the bit list.
int compare(const bvector< Alloc > &bvect) const
Lexicographical comparison with a bitvector.
Constant iterator designed to enumerate "ON" bits counted_enumerator keeps bitcount, ie number of ON bits starting from the position 0 in the bit string up to the currently enumerated bit.
#define BM_DECLARE_TEMP_BLOCK(x)
Statistical information about bitset's memory allocation details.
bool gap_shift_l1(T *buf, unsigned co_flag, unsigned *new_len)
Left shift GAP block by 1 bit.
bvector_type * bvect_
target bvector
counted_enumerator & operator=(const enumerator &en)
const unsigned rs3_border1
bool bit_block_shift_r1_unr(bm::word_t *block, bm::word_t *empty_acc, bm::word_t co_flag)
Right bit-shift of bit-block by 1 bit (loop unrolled)
unsigned idx_arr_block_lookup_u32(const unsigned *idx, unsigned size, unsigned nb, unsigned start)
block boundaries look ahead U32
#define BMSET_PTRGAP(ptr)
BMFORCEINLINE bool sse42_test_all_zero_wave(const void *ptr)
check if wave of pointers is all NULL
const unsigned rs3_border0
void combine_operation_sub(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation MINUS (AND NOT)
void gap_and_to_bitset(unsigned *dest, const T *pcurr)
ANDs GAP block to bitblock.
unsigned gap_bit_count_to(const T *const buf, T right)
Counts 1 bits in GAP buffer in the closed [0, right] range.
bvector< Alloc > & flip()
Flips all bits.
const reference & operator=(bool value) const
void build_rs_index(rs_index_type *rs_idx, bvector< Alloc > *bv_blocks=0) const
compute running total of all blocks in bit vector (rank-select index)
blocks_manager_type::block_idx_type block_idx_type
bvector< Alloc > operator~() const
bvector_type::size_type size_type
BMFORCEINLINE bool gap_is_all_zero(const bm::gap_word_t *buf)
Checks if GAP block is all-zero.
size_t memory_used
memory usage for all blocks and service tables
size_type count_to_test(size_type n, const rs_index_type &blocks_cnt) const
popcount in [0..right] range if test(right) == true
enumerator first() const
Returns enumerator pointing on the first non-zero bit.
void copy_range(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy all bits in the specified closed interval [left,right].
input set is sorted (ascending order)
void combine_operation_with_block(block_idx_type nb, const bm::word_t *arg_blk, bool arg_gap, bm::operation opcode)
void gap_invert(T *buf)
Inverts all bits in the GAP buffer.
sort_order
Sort order declaration.
SIZE_TYPE block_find_rank(const bm::word_t *const block, SIZE_TYPE rank, unsigned nbit_from, unsigned &nbit_pos)
Find rank in block (GAP or BIT)
int bitcmp(const T *buf1, const T *buf2, unsigned len)
Lexicographical comparison of BIT buffers.
bvector< Alloc > & set(size_type n, bool val=true)
Sets bit n if val is true, clears bit n if val is false.
unsigned bit_find_last(const bm::word_t *block, unsigned *last)
BIT block find the last set bit (backward search)
bvector(const bvector< Alloc > &bvect)
Copy constructor.
bool operator>(const bvector< Alloc > &bv) const
size_t ptr_sub_blocks
Number of sub-blocks.
void operator-=(const bvector< Alloc > &bv)
#define IS_FULL_BLOCK(addr)
size_type get_first() const
find first 1 bit in vector. Function may return 0 and this requires an extra check if bit 0 is actual...
bulk_insert_iterator & operator++(int)
bulk_insert_iterator(bvector< Alloc > &bvect, bm::sort_order so=BM_UNKNOWN)
bvector(strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, size_type bv_size=bm::id_max, const Alloc &alloc=Alloc())
Constructs bvector class.
insert_iterator & operator=(size_type n)
bool bit_is_all_zero(const bm::word_t *BMRESTRICT start)
Returns "true" if all bits in the block are 0.
Free unused 0 and 1 blocks.
Default SIMD friendly allocator.
allocation_policy(bm::strategy s=BM_BIT, const gap_word_t *glevels=bm::gap_len_table< true >::_len)
const gap_word_t * glevel_len
bvector< Alloc > operator &(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
const reference & operator=(const reference &ref) const
unsigned gap_bfind(const T *buf, unsigned pos, unsigned *is_set)
void move_from(bvector< Alloc > &bvect) BMNOEXEPT
Move bvector content from another bvector.
No GAP compression strategy. All new blocks are bit blocks.
Bit manipulation primitives (internal)
Encoding utilities for serialization (internal)
bool operator!=(const iterator_base &it) const
bool shift_left()
Shift left by 1 bit, fill with zero return carry out.
unsigned gap_find_first(const T *buf, unsigned *first)
GAP block find the first set bit.
bvector(std::initializer_list< size_type > il)
Brace constructor.
unsigned gap_bit_count_range(const T *const buf, unsigned left, unsigned right)
Counts 1 bits in GAP buffer in the closed [left, right] range.
counted_enumerator operator++(int)
unsigned gap_test_unr(const T *buf, const unsigned pos)
Tests if bit = pos is true. Analog of bm::gap_test with SIMD unrolling.
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand SUB : this := bv1 MINUS bv2 SUBtraction is also known as AND NOT
bvector< Alloc > operator^(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
void bit_block_set(bm::word_t *BMRESTRICT dst, bm::word_t value)
Bitblock memset operation.
bm::id64_t update_block_digest0(const bm::word_t *const block, bm::id64_t digest)
Compute digest for 64 non-zero areas based on existing digest (function revalidates zero areas) ...
bm::id_t bit_block_calc_count_range(const bm::word_t *block, bm::word_t left, bm::word_t right)
const gap_word_t * ptr
Word pointer.
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv)
2 operand logical SUB(AND NOT). Also known as MINUS.
const unsigned gap_levels
BMFORCEINLINE unsigned word_bitcount64(bm::id64_t x)
Default GAP lengths table.
bvector_type::size_type value_type
bool operator==(const reference &ref) const
const unsigned set_array_mask
insert_iterator & operator*()
size_type count() const
population count (count of ON bits)
bm::bvector< Alloc > * bv_
Pointer on parent bitvector.
unsigned short gap_word_t
void init()
Explicit post-construction initialization.
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv)
2 operand logical AND
friend class iterator_base
blocks_manager< Alloc > blocks_manager_type
mem_pool_guard(allocator_pool_type &pool, bvector< Alloc > &bv)
bool set_bit_conditional(size_type n, bool val, bool condition)
Sets bit n only if current value equals the condition.
void gap_sub_to_bitset(unsigned *dest, const T *pcurr)
SUB (AND NOT) GAP block to bitblock.
bool valid() const
Checks if iterator is still valid.
const unsigned bits_in_array
const reference & operator^=(bool value) const
void set_gap_levels(const gap_word_t *glevel_len)
Sets new GAP lengths table. All GAP blocks will be reallocated to match the new scheme.
gap_word_t * gap_operation_or(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize)
GAP OR operation.
const unsigned rs3_half_span
bulk_insert_iterator(const bulk_insert_iterator &iit)
bool clear_bit(size_type n)
Clears bit n.
bvector< Alloc > & set_range(size_type left, size_type right, bool value=true)
Sets all bits in the specified closed interval [left,right] Interval must be inside the bvector's siz...
dgap_descr gap_
DGAP block related info.
void(* gap_operation_to_bitset_func_type)(unsigned *, const gap_word_t *)
bulk_insert_iterator(const insert_iterator &iit)
bool operator[](size_type n) const
insert_iterator & operator=(const insert_iterator &ii)
bool bit_block_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2)
2 way (target := source1 | source2) bitblock OR operation.
bool operator<=(const bvector< Alloc > &bv) const
bvector(size_type bv_size, strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, const Alloc &alloc=Alloc())
Constructs bvector class.
size_type * buf_
bulk insert buffer
std::input_iterator_tag iterator_category
static gap_operation_to_bitset_func_type gap_op_to_bit(unsigned i)
bm::id64_t idx_arr_block_lookup_u64(const bm::id64_t *idx, bm::id64_t size, bm::id64_t nb, bm::id64_t start)
block boundaries look ahead U64
enumerator end() const
Returns enumerator pointing on the next bit after the last.
void reset()
Reset statistics.
Information about current DGAP block.
BMFORCEINLINE gap_word_t * gap_operation_and(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize)
GAP AND operation.
allocator_type::allocator_pool_type allocator_pool_type
bool find_rank(size_type rank, size_type from, size_type &pos) const
Find bit-vector position for the specified rank(bitcount)
#define FULL_BLOCK_REAL_ADDR
enumerator & operator++()
Advance enumerator forward to the next available bit.
#define BM_ASSERT_THROW(x, xerrcode)
enumerator get_enumerator(size_type pos) const
Returns enumerator pointing on specified or the next available bit.
bool any() const
Returns true if any bits in this bitset are set, and otherwise returns false.
Algorithms for fast aggregation of a group of bit-vectors.
bvector & operator=(bvector< Alloc > &&bvect) BMNOEXEPT
Move assignment operator.
void calc_stat(struct bm::bvector< Alloc >::statistics *st) const
Calculates bitvector statistics.
void set_block_bits_u32(bm::word_t *BMRESTRICT block, const unsigned *BMRESTRICT idx, unsigned start, unsigned stop)
set bits in a bit-block using global index
bm::word_t * bit_operation_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
bitblock XOR operation.
bm::id64_t bit_block_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Plain bitblock SUB (AND NOT) operation. Function does not analyse availability of source and destinat...
const unsigned bits_in_block
const unsigned set_block_size_op
union bm::bvector::iterator_base::block_descr bdescr_
bool find_reverse(size_type &pos) const
Finds last index of 1 bit.
size_type extract_next(size_type prev)
Finds the number of the next bit ON and sets it to 0.
void keep(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
Keep list of bits in this bitset, others are cleared.
bm::sort_order sorted_
sort order hint
const unsigned gap_max_bits
Class reference implements an object for bit assignment.
void merge(bm::bvector< Alloc > &bvect)
Merge/move content from another vector.
enumerator & skip_to_rank(size_type rank)
Skip to specified relative rank.
enumerator & skip(size_type rank)
Skip specified number of bits from enumeration.
bool test(size_type n) const
returns true if bit n is set and false is bit n is 0.
bool improve_gap_levels(const T *length, const T *length_end, T *glevel_len)
Finds optimal gap blocks lengths.
const unsigned set_block_mask
static void throw_bad_alloc()
block_idx_type block_idx_
Block index.
unsigned bit_find_first(const bm::word_t *block, unsigned *first)
BIT block find the first set bit.
insert_iterator & operator++()
bulk_insert_iterator & operator=(size_type n)
Constant iterator designed to enumerate "ON" bits.
optmode
Optimization mode Every next level means additional checks (better compression vs time) ...
void resize(size_type new_size)
Change size of the bvector.
bm::id64_t bit_block_sub_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest)
digest based bitblock SUB (AND NOT) operation (3 operand)
strategy get_new_blocks_strat() const
Returns blocks allocation strategy.
const unsigned set_word_mask
bm::word_t * bit_operation_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
bitblock AND operation.
bvector< Alloc > & invert()
Invert/NEG all bits It should be noted, invert is affected by size() if size is set - it only inverts...
static size_type buf_size_max()
bvector< Alloc > operator|(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand AND : this := bv1 AND bv2
unsigned short bitscan_wave(const bm::word_t *w_ptr, unsigned char *bits)
Unpacks word wave (Nx 32-bit words)
bool gap_shift_r1(T *buf, unsigned co_flag, unsigned *new_len)
Right shift GAP block by 1 bit.
bvector_type * get_bvector() const
gap_word_t gap_levels[bm::gap_levels]
GAP block lengths in the bvect.
bm::id64_t bit_block_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src)
Plain bitblock AND operation. Function does not analyse availability of source and destination blocks...
unsigned gap_limit(const T *buf, const T *glevel_len)
Returns GAP block capacity limit.
bvector & operator=(const bvector< Alloc > &bvect)
Copy assignment operator.
unsigned * gap_convert_to_bitset_smart(unsigned *dest, const T *buf, id_t set_max)
Smart GAP block to bitblock conversion.
size_type value() const
Get current position (value)
bulk_insert_iterator & operator=(const bulk_insert_iterator &ii)
void clear(bool free_mem=false)
Clears every bit in the bitvector.
bool operator<(const bvector< Alloc > &bv) const
Base class for all iterators.
bm::id64_t bit_block_xor_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2)
2 way (target := source1 ^ source2) bitblock XOR operation.
static gap_operation_func_type gap_operation(unsigned i)
const unsigned set_total_blocks
size_t bv_count
Number of bit-vectors.
bool none() const
Returns true if no bits are set, otherwise returns false.
void advance()
advance iterator forward by one
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv)
2 operand logical OR
size_type buf_size_
current buffer size
bm::id64_t calc_block_digest0(const bm::word_t *const block)
Compute digest for 64 non-zero areas.
void set_allocator_pool(allocator_pool_type *pool_ptr)
Set allocator pool for local (non-threaded) memory cyclic (lots of alloc-free ops) operations...
Output iterator designed to set "ON" bits based on input sequence of integers (bit indices)...
void combine_operation(const bm::bvector< Alloc > &bvect, bm::operation opcode)
perform a set-algebra operation by operation code
void erase(size_type n)
Erase bit in the specified position. All the vector content after erase position is shifted left...
size_type count() const
Number of bits ON (population count).
bool is_bits_one(const bm::wordop_t *start)
Returns "true" if all bits in the block are 1.
blocks_manager_type & get_blocks_manager()
get access to memory manager (internal). Use only if you are a BitMagic library developer.
Structure with statistical information about memory allocation footprint, serialization projection...
const unsigned set_block_shift
strategy
Block allocation strategies.
bm::id64_t bit_block_and_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest)
digest based bit-block AND
bm::id_t bit_block_calc_count_to(const bm::word_t *block, bm::word_t right)
std::input_iterator_tag iterator_category
void operator^=(const bvector< Alloc > &bv)
bool get_bit(size_type n) const
returns true if bit n is set and false if bit n is 0.
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand OR : this := bv1 OR bv2
void for_each_nzblock(T ***root, unsigned size1, F &f)
void operator&=(const bvector< Alloc > &bv)
bool shift_right()
Shift right by 1 bit, fill with zero return carry out.
enumerator(const bvector< Alloc > *bv, size_type pos)
Construct enumerator for bit vector.
int gapcmp(const T *buf1, const T *buf2)
Lexicographical comparison of GAP buffers.
void add_gap_block(unsigned capacity, unsigned length)
count gap block
size_type count_to(size_type n, const rs_index_type &rs_idx) const
Returns count of 1 bits (population) in [0..right] range.
void operator|=(const bvector< Alloc > &bv)
void set_bit_no_check(size_type n)
Set bit without checking preconditions (size, etc)
const id64_t all_bits_mask
bvector< Alloc > operator-(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
void sync_size()
Synchronize size if it got extended due to bulk import.
#define FULL_BLOCK_FAKE_ADDR
size_type operator*() const
Get current position (value)
bool set_bit(size_type n, bool val=true)
Sets bit n.
unsigned block_type_
Type of block. 0-Bit, 1-GAP.
gap_word_t *(* gap_operation_func_type)(const gap_word_t *BMRESTRICT, const gap_word_t *BMRESTRICT, gap_word_t *BMRESTRICT, unsigned &)
unsigned gap_block_find(const T *buf, unsigned nbit, bm::id_t *prev)
Searches for the next 1 bit in the GAP block.
Alloc get_allocator() const
#define BLOCK_ADDR_SAN(addr)
bool insert(size_type n, bool value)
Insert bit into specified position. All the vector content after insert position is shifted right...
bvector< Alloc > & flip(size_type n)
Flips bit n.
void bit_block_erase(bm::word_t *block, unsigned bitpos, bool carry_over)
erase bit from position and shift the rest right with carryover