#pragma once
#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include "threading/sync/lockable.hpp"
#include "threading/sync/spinlock.hpp"
template <class block_t = uint8_t, size_t chunk_size = 32768>
class DynamicBitset : Lockable<SpinLock>
{
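// A Block is a single atomic word of block_t holding Block::size bits;
// groups of bits inside it are read and modified with atomic
// fetch_or / fetch_and operations.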
struct Block
{
Block() = default;
Block(Block &) = delete;
Block(Block &&) = delete;
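
// number of bits stored in a single block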
static constexpr size_t size = sizeof(block_t) * 8;
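
// returns a mask with the lowest group_size bits set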
constexpr block_t bitmask(size_t group_size) const
{
return (block_t)(-1) >> (size - group_size);
}
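
// returns the group of n bits starting at bit k; the group must not
// cross the block boundary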
block_t at(size_t k, size_t n, std::memory_order order)
{
assert(k + n - 1 < size);
return (block.load(order) >> k) & bitmask(n);
}
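
// atomically sets the group of n bits starting at bit k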
void set(size_t k, size_t n, std::memory_order order)
{
assert(k + n - 1 < size);
block.fetch_or(bitmask(n) << k, order);
}
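
// atomically clears the group of n bits starting at bit k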
void clear(size_t k, size_t n, std::memory_order order)
{
assert(k + n - 1 < size);
block.fetch_and(~(bitmask(n) << k), order);
}
std::atomic<block_t> block{0};
};
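
// A Chunk holds a fixed array of blocks and an atomic pointer to the
// next chunk, forming the linked list that the bitset grows on demand.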
struct Chunk
{
Chunk() : next(nullptr)
{
static_assert(chunk_size % sizeof(block_t) == 0,
"chunk size not divisible by block size");
}
Chunk(Chunk &) = delete;
Chunk(Chunk &&) = delete;

// Chunks are deallocated iteratively in ~DynamicBitset; deleting `next`
// here as well would free the rest of the chain twice.
~Chunk() = default;

// number of bits that fit into one chunk (chunk_size is given in bytes)
static constexpr size_t size = (chunk_size / sizeof(block_t)) * Block::size;
static constexpr size_t n_blocks = chunk_size / sizeof(block_t);
block_t at(size_t k, size_t n, std::memory_order order)
{
return blocks[k / Block::size].at(k % Block::size, n, order);
}
void set(size_t k, size_t n, std::memory_order order)
{
blocks[k / Block::size].set(k % Block::size, n, order);
}
void clear(size_t k, size_t n, std::memory_order order)
{
blocks[k / Block::size].clear(k % Block::size, n, order);
}
Block blocks[n_blocks];
std::atomic<Chunk *> next;
};
public:
DynamicBitset() : head(new Chunk()) {}
DynamicBitset(DynamicBitset &) = delete;
DynamicBitset(DynamicBitset &&) = delete;
~DynamicBitset()
{
auto now = head.load();
while (now != nullptr) {
auto next = now->next.load();
delete now;
now = next;
}
}
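
// returns the group of n bits starting at index k, packed into a block_t;
// the group must lie within a single block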
block_t at(size_t k, size_t n)
{
auto &chunk = find_chunk(k);
return chunk.at(k, n, std::memory_order_seq_cst);
}
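
// returns true if the bit at index k is set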
bool at(size_t k)
{
auto &chunk = find_chunk(k);
return chunk.at(k, 1, std::memory_order_seq_cst);
}
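
// sets n consecutive bits starting at index k (by default, just bit k)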
void set(size_t k, size_t n = 1)
{
auto &chunk = find_chunk(k);
return chunk.set(k, n, std::memory_order_seq_cst);
}
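
// clears n consecutive bits starting at index k (by default, just bit k)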
void clear(size_t k, size_t n = 1)
{
auto &chunk = find_chunk(k);
return chunk.clear(k, n, std::memory_order_seq_cst);
}
private:
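// returns the chunk that holds global bit index k, appending new chunks
// to the list as needed; k is rewritten to an index relative to that chunk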
Chunk &find_chunk(size_t &k)
{
Chunk *chunk = head.load(), *next = nullptr;

// while index k does not fall into the current chunk
// (k is larger than or equal to the number of bits in this chunk)
while (k >= Chunk::size) {
next = chunk->next.load();

// if a next chunk exists, switch to it and decrement the index
// by the size of the current chunk
if (next != nullptr) {
chunk = next;
k -= Chunk::size;
continue;
}

// the next chunk does not exist and we need it, so take an exclusive
// lock to prevent other threads that also want to append a chunk
// from creating it as well
auto guard = acquire_unique();

// double-checked locking: if the chunk exists by now, some other
// thread has just created it, so continue searching for the right chunk
if (chunk->next.load() != nullptr) continue;
chunk->next.store(new Chunk());
}
assert(chunk != nullptr);
return *chunk;
}
std::atomic<Chunk *> head;
};
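
// Usage sketch (illustrative only, not part of the original header; assumes
// <thread> and <cassert> are available in the caller). With the default
// parameters one chunk holds 32768 * 8 = 262144 bits, so setting an index
// past that appends a second chunk. Concurrent writers may set or clear
// bits without any external synchronization:
//
//   DynamicBitset<> bits;
//   std::thread t1([&] { bits.set(5); });        // lands in the first chunk
//   std::thread t2([&] { bits.set(300000); });   // allocates a second chunk
//   t1.join();
//   t2.join();
//   assert(bits.at(5) && bits.at(300000));
//   bits.clear(5, 1);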