#ifndef BMUTIL__H__INCLUDED__
#define BMUTIL__H__INCLUDED__
#if defined(__arm64__) || defined(__arm__)
/* (ARM branch elided in this excerpt) */
#else
#if defined(_M_AMD64) || defined(_M_X64)
#include <intrin.h>
#elif defined(__x86_64__)
#include <x86intrin.h>
#endif
#endif
#ifdef __GNUG__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif

#ifdef _MSC_VER
#pragma warning( push )
#pragma warning( disable : 4146)
#endif
/*!
    @brief bit-block array wrapped into a union for correct interpretation of
    32-bit vs 64-bit access vs SIMD
    (BM_VECT_ALIGN alignment attributes omitted in this excerpt)
*/
struct bit_block_t
{
    union bunion_t
    {
        bm::word_t w32[bm::set_block_size];
        bm::id64_t w64[bm::set_block_size / 2];
#if defined(BMAVX512OPT)
        __m512i    w512[bm::set_block_size / 16];
#endif
#if defined(BMAVX2OPT)
        __m256i    w256[bm::set_block_size / 8];
#endif
#if defined(BMSSE2OPT) || defined(BMSSE42OPT)
        __m128i    w128[bm::set_block_size / 4];
#endif
    } b_;

#if defined(BMAVX512OPT)
    explicit operator __m512i*() { return &b_.w512[0]; }
    explicit operator const __m512i*() const { return &b_.w512[0]; }
#endif
#if defined(BMAVX2OPT)
    explicit operator __m256i*() { return &b_.w256[0]; }
    explicit operator const __m256i*() const { return &b_.w256[0]; }
#endif
#if defined(BMSSE2OPT) || defined(BMSSE42OPT)
    explicit operator __m128i*() { return &b_.w128[0]; }
    explicit operator const __m128i*() const { return &b_.w128[0]; }
#endif
};
/// Get minimum of 2 values
template<typename T>
BMFORCEINLINE T min_value(T v1, T v2) noexcept
{
    return v1 < v2 ? v1 : v2;
}
/// ad-hoc conditional expressions
template<bool b> struct conditional
{
    static bool test() { return true; }
};
template<> struct conditional<false>
{
    static bool test() { return false; }
};
/// Fast loop-less function to find LOG2
template<typename T>
T ilog2(T x) noexcept
{
    unsigned int l = 0;
    if (x >= 1<<16) { x = (T)(x >> 16); l |= 16; }
    if (x >= 1<<8)  { x = (T)(x >> 8);  l |= 8;  }
    if (x >= 1<<4)  { x = (T)(x >> 4);  l |= 4;  }
    if (x >= 1<<2)  { x = (T)(x >> 2);  l |= 2;  }
    if (x >= 1<<1)
        l |= 1;
    return (T)l;
}

/// 16-bit (bm::gap_word_t) specialization: the top test is unnecessary
template<>
inline bm::gap_word_t ilog2(bm::gap_word_t x) noexcept
{
    unsigned int l = 0;
    if (x >= 1<<8) { x = (bm::gap_word_t)(x >> 8); l |= 8; }
    if (x >= 1<<4) { x = (bm::gap_word_t)(x >> 4); l |= 4; }
    if (x >= 1<<2) { x = (bm::gap_word_t)(x >> 2); l |= 2; }
    if (x >= 1<<1)
        l |= 1;
    return (bm::gap_word_t)l;
}
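/* Illustrative usage sketch (hypothetical ilog2_demo(), not part of
   bmutil.h): each comparison above peels half of the remaining bit width,
   so five branches resolve log2 of any 32-bit value. */
#include <cassert>
inline void ilog2_demo()
{
    assert(bm::ilog2(1u) == 0u);
    assert(bm::ilog2(255u) == 7u);          // highest set bit is 7
    assert(bm::ilog2(0x00040000u) == 18u);  // highest set bit is 18
}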
/// Portable LZCNT (uses a minimal 4-bit LUT): two ternary cascades narrow
/// the argument to its highest non-zero nibble, then a 16-entry table
/// (bm::lzcnt_table) finishes the count
BMFORCEINLINE unsigned count_leading_zeros(unsigned x) noexcept
{
    unsigned n = (x >= (1U << 16))
        ? ((x >= (1U << 24)) ? ((x >= (1U << 28)) ? 28u : 24u) : ((x >= (1U << 20)) ? 20u : 16u))
        : ((x >= (1U << 8)) ? ((x >= (1U << 12)) ? 12u : 8u) : ((x >= (1U << 4)) ? 4u : 0u));

    return unsigned(bm::lzcnt_table<>::_lut[x >> n]) - n;
}
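/* Worked example of the nibble narrowing above (hypothetical demo, not from
   the library): for x = 0x00050000 the cascade picks n = 16, leaving the
   4-bit remainder 5 for the final LUT step (16 + log2(5) = 18). */
#include <cassert>
inline void nibble_narrowing_demo()
{
    unsigned x = 0x00050000;
    unsigned n = (x >= (1U << 16))
        ? ((x >= (1U << 24)) ? ((x >= (1U << 28)) ? 28u : 24u) : ((x >= (1U << 20)) ? 20u : 16u))
        : ((x >= (1U << 8)) ? ((x >= (1U << 12)) ? 12u : 8u) : ((x >= (1U << 4)) ? 4u : 0u));
    assert(n == 16u && (x >> n) == 5u);
}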
#if defined(BM_x86) && !(defined(__arm__) || defined(__aarch64__))
BMFORCEINLINE unsigned bsf_asm32(unsigned int v) noexcept
{
    unsigned r;
    asm volatile(" bsfl %1, %0" : "=r"(r) : "rm"(v));
    return r;
}

BMFORCEINLINE unsigned bsr_asm32(unsigned int v) noexcept
{
    unsigned r;
    asm volatile(" bsrl %1, %0" : "=r"(r) : "rm"(v));
    return r;
}
#endif
#if defined(_M_AMD64) || defined(_M_X64)
BMFORCEINLINE unsigned long bsr_asm32(unsigned long value) noexcept
{
    unsigned long r;
    _BitScanReverse(&r, value);
    return r;
}

BMFORCEINLINE unsigned long bsf_asm32(unsigned long value) noexcept
{
    unsigned long r;
    _BitScanForward(&r, value);
    return r;
}
#endif
BMFORCEINLINE unsigned bit_scan_reverse32(unsigned w) noexcept
{
#if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)(31 - __builtin_clz(w));
#else
# if defined(BM_x86) && (defined(__GNUG__) || defined(_MSC_VER))
    return bm::bsr_asm32(w);
# else
    return bm::ilog2_LUT<unsigned int>(w);
# endif
#endif
}
BMFORCEINLINE unsigned bit_scan_forward32(unsigned w) noexcept
{
#if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)__builtin_ctz(w);
#else
# if defined(BM_x86) && (defined(__GNUG__) || defined(_MSC_VER))
    return bm::bsf_asm32(w);
# else
    return bm::count_trailing_zeros(w); // portable LUT path
# endif
#endif
}
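/* Illustrative check (hypothetical bitscan_demo(), not part of bmutil.h):
   bit-scan reverse reports the highest set bit index, bit-scan forward the
   lowest; both are undefined for w == 0, so callers must guard zero inputs. */
#include <cassert>
inline void bitscan_demo()
{
    unsigned v = 0x28; // bits 3 and 5 set
    assert(bm::bit_scan_reverse32(v) == 5u);
    assert(bm::bit_scan_forward32(v) == 3u);
}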
/// Drop the lowest set bit, w & (w - 1) (BMI1 BLSR or portable fallback)
BMFORCEINLINE unsigned long long bmi_bslr_u64(unsigned long long w) noexcept
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return _blsr_u64(w);
#else
    return w & (w - 1);
#endif
}

/// Isolate the lowest set bit, w & (-w) (BMI1 BLSI or portable fallback)
BMFORCEINLINE unsigned long long bmi_blsi_u64(unsigned long long w)
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return _blsi_u64(w);
#else
    return w & (~w + 1); // == w & -w
#endif
}
/// 32-bit LZCNT
BMFORCEINLINE unsigned count_leading_zeros_u32(unsigned w) noexcept
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return (unsigned)_lzcnt_u32(w);
#else
# if defined(BM_USE_GCC_BUILD) || defined(__GNUG__)
    return (unsigned)__builtin_clz(w);
# endif
#endif
}
/// 64-bit bit-scan reverse
BMFORCEINLINE unsigned count_leading_zeros_u64(bm::id64_t w) noexcept
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return (unsigned)_lzcnt_u64(w);
#else
# if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)__builtin_clzll(w);
# else
    // portable: scan the high 32-bit half first
    unsigned w1 = unsigned(w >> 32);
    if (w1)
        return bm::count_leading_zeros_u32(w1);
    return 32 + bm::count_leading_zeros_u32(unsigned(w));
# endif
#endif
}
/// 32-bit bit-scan forward
BMFORCEINLINE unsigned count_trailing_zeros_u32(unsigned w) noexcept
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return (unsigned)_tzcnt_u32(w);
#else
# if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)__builtin_ctz(w);
# endif
#endif
}
/// 64-bit bit-scan forward
BMFORCEINLINE unsigned count_trailing_zeros_u64(bm::id64_t w) noexcept
{
#if defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return (unsigned)_tzcnt_u64(w);
#else
# if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)__builtin_ctzll(w);
# else
    // portable: scan the low 32-bit half first
    unsigned w1 = unsigned(w);
    if (w1)
        return bm::count_trailing_zeros_u32(w1);
    w1 = unsigned(w >> 32);
    return 32 + bm::count_trailing_zeros_u32(w1);
# endif
#endif
}
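/* Illustrative check of the split-into-halves fallback (hypothetical demo,
   not part of bmutil.h): a bit set only in the high half reports 32 plus
   its offset within that half. */
#include <cassert>
inline void u64_halves_demo()
{
    bm::id64_t w = 1ULL << 40;
    assert(bm::count_trailing_zeros_u64(w) == 40u);
    assert(bm::count_leading_zeros_u64(w) == 23u); // 63 - 40
}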
/// Generic bit-scan reverse (handles 32- and 64-bit arguments)
template<typename T>
BMFORCEINLINE unsigned bit_scan_reverse(T value) noexcept
{
#if defined(BM_USE_GCC_BUILD) || (defined(__GNUG__) && (defined(__arm__) || defined(__aarch64__)))
    return (unsigned)(63 - __builtin_clzll(value));
#else
    if (bm::conditional<sizeof(T) == 8>::test()) // 64-bit argument
    {
        bm::id64_t v8 = value;
        v8 >>= 32;
        unsigned v = (unsigned)v8;
        if (v)
            return 32 + bm::bit_scan_reverse32(v);
    }
    return bm::bit_scan_reverse32((unsigned)value);
#endif
}
/// mask of ON bits [nbit .. 31]
BMFORCEINLINE unsigned mask_r_u32(unsigned nbit) noexcept
{ unsigned m = (~0u << nbit); return m; }

/// mask of ON bits [0 .. nbit]
BMFORCEINLINE unsigned mask_l_u32(unsigned nbit) noexcept
{ unsigned m = ~0u >> (31 - nbit); return m; }
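/* Worked example (hypothetical mask_demo(), not part of bmutil.h):
   for nbit = 4 the two helpers produce masks that overlap at bit 4. */
#include <cassert>
inline void mask_demo()
{
    assert(bm::mask_r_u32(4) == 0xFFFFFFF0u); // bits [4 .. 31]
    assert(bm::mask_l_u32(4) == 0x0000001Fu); // bits [0 .. 4]
}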
/// XOR swap two variables (x and y must be distinct objects)
template<typename W>
BMFORCEINLINE void xor_swap(W& x, W& y) noexcept
{
    x ^= y; y ^= x; x ^= y;
}
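/* Usage note (hypothetical demo, not part of bmutil.h): the XOR trick
   requires distinct objects; xor_swap(a, a) would zero a. */
#include <cassert>
inline void xor_swap_demo()
{
    unsigned a = 3, b = 7;
    bm::xor_swap(a, b);
    assert(a == 7u && b == 3u);
}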
#ifdef __GNUG__
#pragma GCC diagnostic pop
#endif

#ifdef _MSC_VER
#pragma warning( pop )
#endif
/// Compute mask of byte presence in a 64-bit word
inline unsigned compute_h64_mask(unsigned long long w) noexcept
{
    unsigned h_mask = 0;
    for (unsigned i = 0; w && (i < 8); ++i, w >>= 8)
    {
        if ((unsigned char) w)
            h_mask |= (1u << i);
    }
    return h_mask;
}
/// Returns true if the 64-bit word contains a zero octet
BMFORCEINLINE bool has_zero_byte_u64(bm::id64_t v) noexcept
{
    return (v - 0x0101010101010101ULL) & ~(v) & 0x8080808080808080ULL;
}
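/* The zero-byte trick (hypothetical demo, not part of bmutil.h): in
   (v - 0x0101...) & ~v & 0x8080..., a high marker bit can survive only
   where a byte of v was zero, so the expression is non-zero exactly when
   the word contains a 0x00 octet. */
#include <cassert>
inline void zero_byte_demo()
{
    assert( bm::has_zero_byte_u64(0x1122330044556677ULL)); // one 0x00 byte
    assert(!bm::has_zero_byte_u64(0x1122334455667788ULL)); // no zero bytes
}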
/// 32-bit population count
BMFORCEINLINE bm::id_t word_bitcount(bm::id_t w) noexcept
{
#if defined(BMSSE42OPT) || defined(BMAVX2OPT) || defined(BMAVX512OPT)
    return bm::id_t(_mm_popcnt_u32(w));
#else
# if defined(BM_USE_GCC_BUILD)
    return (bm::id_t)__builtin_popcount(w);
# else
    // (portable SWAR fallback elided in this excerpt)
# endif
#endif
}
/// 64-bit population count
BMFORCEINLINE unsigned word_bitcount64(bm::id64_t x) noexcept
{
#if defined(BMSSE42OPT) || defined(BMAVX2OPT) || defined(BMAVX512OPT)
  #if defined(BM64_SSE4) || defined(BM64_AVX2) || defined(BM64_AVX512)
    return unsigned(_mm_popcnt_u64(x));
  #else // 32-bit build: count the two halves separately
    return unsigned(_mm_popcnt_u32(unsigned(x >> 32)) + _mm_popcnt_u32(unsigned(x)));
  #endif
#else
  #if defined(BM_USE_GCC_BUILD) || defined(__arm64__)
    return (unsigned)__builtin_popcountll(x);
  #elif defined(__arm__) // 32-bit ARM: two 32-bit counts
    return bm::word_bitcount(unsigned(x >> 32)) + bm::word_bitcount(unsigned(x));
  #else // generic SWAR (parallel) bit count
    x = x - ((x >> 1) & 0x5555555555555555);
    x = (x & 0x3333333333333333) + ((x >> 2) & 0x3333333333333333);
    x = (x + (x >> 4)) & 0x0F0F0F0F0F0F0F0F;
    return unsigned((x * 0x0101010101010101ULL) >> 56);
  #endif
#endif
}
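/* Why the SWAR fallback works (hypothetical demo, not part of bmutil.h):
   the three masking steps sum bit counts in 2-, 4- and 8-bit fields; the
   final multiply by 0x0101... folds all byte counts into the top byte. */
#include <cassert>
inline void popcount64_demo()
{
    assert(bm::word_bitcount64(0xFFULL) == 8u);
    assert(bm::word_bitcount64(0x8000000000000001ULL) == 2u);
}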
/// Check pointer alignment
/// (fixed: the modulo must be taken on an integer, not a pointer; needs <cstdint>)
template<typename T>
bool is_aligned(T* p) noexcept
{
#if defined(BM_ALLOC_ALIGN)
    return !(reinterpret_cast<std::uintptr_t>(p) % BM_ALLOC_ALIGN);
#else
    (void)p;
    return true;
#endif
}
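/* Illustrative sketch (hypothetical helper, not part of bmutil.h): with
   BM_ALLOC_ALIGN set to a SIMD width (e.g. 32 for AVX2), this is the kind
   of check is_aligned() performs before block pointers are cast to SIMD
   types. */
#include <cstddef>
#include <cstdint>
inline bool aligned_to(const void* p, std::size_t n)
{
    return (reinterpret_cast<std::uintptr_t>(p) % n) == 0;
}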
Constants, lookup tables and typedefs.
#define BM_VECT_ALIGN_ATTR
Mini auto-pointer for internal memory management.
ptr_guard(const ptr_guard< T > &p)
ptr_guard & operator=(const ptr_guard< T > &p)
unsigned word_bitcount64(bm::id64_t x) noexcept
unsigned count_leading_zeros(unsigned x) noexcept
Portable LZCNT (uses a minimal 4-bit LUT).
bm::id_t word_bitcount(bm::id_t w) noexcept
unsigned count_trailing_zeros(unsigned v) noexcept
Portable TZCNT (uses a 37-entry LUT).
unsigned bit_scan_reverse(T value) noexcept
void xor_swap(W &x, W &y) noexcept
XOR swap two variables.
unsigned count_trailing_zeros_u64(bm::id64_t w) noexcept
64-bit bit-scan forward.
unsigned long long bmi_bslr_u64(unsigned long long w) noexcept
T ilog2(T x) noexcept
Fast loop-less function to find LOG2.
T ilog2_LUT(T x) noexcept
Lookup table based integer LOG2.
const unsigned set_block_size
unsigned long long int id64_t
unsigned mask_r_u32(unsigned nbit) noexcept
unsigned bit_scan_forward32(unsigned w) noexcept
T bit_scan_fwd(T v) noexcept
unsigned compute_h64_mask(unsigned long long w) noexcept
Compute the mask of byte presence in a 64-bit word.
unsigned count_leading_zeros_u32(unsigned w) noexcept
32-bit bit-scan reverse
unsigned short gap_word_t
bool is_aligned(T *p) noexcept
Check pointer alignment.
unsigned count_leading_zeros_u64(bm::id64_t w) noexcept
64-bit bit-scan reverse
bool has_zero_byte_u64(bm::id64_t v) noexcept
Returns true if the 64-bit word contains a zero octet.
unsigned count_trailing_zeros_u32(unsigned w) noexcept
32-bit bit-scan forward.
T min_value(T v1, T v2) noexcept
Get minimum of 2 values.
unsigned long long bmi_blsi_u64(unsigned long long w)
unsigned bit_scan_reverse32(unsigned w) noexcept
unsigned mask_l_u32(unsigned nbit) noexcept
static unsigned op(unsigned v1, unsigned v2)
Bit-block array wrapped into a union for correct interpretation of 32-bit vs 64-bit access vs SIMD.
union bm::bit_block_t::bunion_t b_
const bm::word_t * end() const
const bm::word_t * begin() const
Structure to aid in counting bits: the table contains bit counts for numbers in the 0-255 range.
Structure that keeps the all-left/all-right ON-bit masks.
Ad-hoc conditional expressions.
Structure that keeps the index of the first (rightmost) set bit for every byte value.
Structure for LZCNT constants (4-bit)
static unsigned op(unsigned v1, unsigned v2)
static unsigned op(unsigned v1, unsigned v2)
Structure for TZCNT constants.
static unsigned op(unsigned v1, unsigned v2)
bm::word_t w32[bm::set_block_size]
bm::id64_t w64[bm::set_block_size/2]