// Type-erased invoker: calls the functor of type F stored at `ptr`.
// `ptr` must point to a live F (placed there by Node's functor constructor).
template <class F>
void callFunctor(void* ptr) {
  (*static_cast<F*>(ptr))();
}
217void destroyFunctor(
void* ptr) {
218 static_cast<F*
>(ptr)->~F();
219 constexpr size_t kFuncSize =
static_cast<size_t>(dispenso::detail::nextPow2(
sizeof(F)));
220 dispenso::deallocSmallBuffer<kFuncSize>(ptr);
233 Node& operator=(
const Node&) =
delete;
236 : numIncompletePredecessors_(other.numIncompletePredecessors_.load()),
237 numPredecessors_(other.numPredecessors_),
238 invoke_(other.invoke_),
239 destroy_(other.destroy_),
240 funcBuffer_(other.funcBuffer_),
241 dependents_(std::move(other.dependents_)) {
242 other.funcBuffer_ =
nullptr;
246 destroy_(funcBuffer_);
// Declares that this node depends on each of `nodes` (runs after them).
// The initializer_list cast is the pre-C++17 idiom for expanding the pack
// and calling dependsOnOneNode once per argument, in order.
template <typename... Ns>
void dependsOn(Ns&... nodes) {
  ((void)std::initializer_list<int>{(dependsOnOneNode(nodes), 0)...});
}
263 invoke_(funcBuffer_);
264 numIncompletePredecessors_.store(kCompleted, std::memory_order_release);
273 for (
const Node* dependent : dependents_) {
284 for (
Node* dependent : dependents_) {
292 return numPredecessors_;
301 return numIncompletePredecessors_.load(std::memory_order_relaxed) == kCompleted;
311 if (numIncompletePredecessors_.load(std::memory_order_relaxed) == kCompleted) {
312 numIncompletePredecessors_.store(0, std::memory_order_relaxed);
324 numIncompletePredecessors_.store(kCompleted, std::memory_order_relaxed);
329 template <class F, class X = std::enable_if_t<!std::is_base_of<Node, F>::value,
void>>
330 Node(F&& f) : numIncompletePredecessors_(0) {
331 using FNoRef =
typename std::remove_reference<F>::type;
333 constexpr size_t kFuncSize =
static_cast<size_t>(detail::nextPow2(
sizeof(FNoRef)));
334 funcBuffer_ = allocSmallBuffer<kFuncSize>();
335 new (funcBuffer_) FNoRef(std::forward<F>(f));
336 invoke_ = ::detail::callFunctor<FNoRef>;
337 destroy_ = ::detail::destroyFunctor<FNoRef>;
340 void dependsOnOneNode(
Node& node) {
341 node.dependents_.emplace_back(
this);
345 static constexpr size_t kCompleted = std::numeric_limits<size_t>::max();
346 mutable std::atomic<size_t> numIncompletePredecessors_;
347 size_t numPredecessors_ = 0;
350 using InvokerType = void (*)(
void* ptr);
353 InvokerType destroy_;
356 std::vector<Node*> dependents_;
359 friend class SubgraphT;
360 friend class ::detail::ExecutorBase;
361 template <
typename G>
362 friend void setAllNodesIncomplete(
const G& graph);
375 :
Node(std::move(other)), biPropSet_(std::move(other.biPropSet_)) {}
// Declares bidirectional-propagation dependencies on each of `nodes`:
// in addition to the ordinary edge, this node is linked into a common
// bi-prop set with each argument (pack expanded in order, pre-C++17 idiom).
template <class... Ns>
void biPropDependsOn(Ns&... nodes) {
  ((void)std::initializer_list<int>{(biPropDependsOnOneNode(nodes), 0)...});
}
393 return biPropSet_ && biPropSet_ == node.biPropSet_;
397 template <class T, class X = std::enable_if_t<!std::is_base_of<BiPropNode, T>::value,
void>>
399 inline void removeFromBiPropSet() {
400 if (biPropSet_ !=
nullptr) {
401 auto it = std::find(biPropSet_->begin(), biPropSet_->end(),
this);
402 if (it != biPropSet_->end()) {
403 biPropSet_->erase(it);
408 DISPENSO_DLL_ACCESS
void biPropDependsOnOneNode(BiPropNode& node);
410 std::shared_ptr<std::vector<const BiPropNode*>> biPropSet_;
413 friend class SubgraphT;
414 friend class ::detail::ExecutorBase;
434 : graph_(other.graph_),
435 nodes_(std::move(other.nodes_)),
436 allocator_(std::move(other.allocator_)) {}
445 DISPENSO_REQUIRES(OnceCallableFunc<T>)
447 nodes_.push_back(
new (allocator_->alloc()) NodeType(std::forward<T>(f)));
448 return *nodes_.back();
454 return nodes_.size();
461 const N&
node(
size_t index)
const {
462 return *nodes_[index];
470 return *nodes_[index];
479 for (
const N* node : nodes_) {
491 for (N* node : nodes_) {
503 using PoolPtr = std::unique_ptr<NoLockPoolAllocator, DeallocFunc>;
505 static constexpr size_t kNodeSizeP2 =
static_cast<size_t>(detail::nextPow2(
sizeof(NodeType)));
507 explicit SubgraphT(
GraphT<N>* graph) : graph_(graph), nodes_(), allocator_(getAllocator()) {}
509 inline void removeNodeFromBiPropSet(
Node* ) {}
510 void removeNodeFromBiPropSet(BiPropNode* node) {
511 node->removeFromBiPropSet();
513 void decrementDependentCounters();
514 size_t markNodesWithPredicessors();
515 void removePredecessorDependencies(
size_t numGraphPredecessors);
519 static PoolPtr getAllocator();
520 static void releaseAllocator(NoLockPoolAllocator* ptr);
523#if defined(_WIN32) && !defined(__MINGW32__)
525#pragma warning(disable : 4251)
527 std::vector<N*> nodes_;
530#if defined(_WIN32) && !defined(__MINGW32__)
575 DISPENSO_REQUIRES(OnceCallableFunc<T>)
577 return subgraphs_[0].addNode(std::forward<T>(f));
583 return subgraphs_[0].numNodes();
590 const N&
node(
size_t index)
const {
591 return subgraphs_[0].node(index);
599 return subgraphs_[0].node(index);
609 return subgraphs_.size();
617 return subgraphs_[index];
625 return subgraphs_[index];
658 for (
const N* node : subgraph.nodes_) {
671 for (N* node : subgraph.nodes_) {
688 subgraph.destroyNodes();
693 static constexpr size_t kSubgraphSizeP2 =
694 static_cast<size_t>(detail::nextPow2(
sizeof(SubgraphType)));
696#if defined(_WIN32) && !defined(__MINGW32__)
698#pragma warning(disable : 4251)
700 std::deque<SubgraphT<N>> subgraphs_;
701#if defined(_WIN32) && !defined(__MINGW32__)
BiPropNode(BiPropNode &&other) noexcept
bool isSameSet(const BiPropNode &node) const
void biPropDependsOn(Ns &... nodes)
void forEachNode(F &&func) const
void forEachNode(F &&func)
void forEachSubgraph(F &&func) const
size_t numSubgraphs() const
void forEachSubgraph(F &&func)
const SubgraphT< N > & subgraph(size_t index) const
GraphT(GraphT< N > &&other)
const N & node(size_t index) const
SubgraphT< N > & subgraph(size_t index)
SubgraphT< N > & addSubgraph()
bool setIncomplete() const
void setCompleted() const
void forEachDependent(F &&func) const
void forEachDependent(F &&func)
void dependsOn(Ns &... nodes)
size_t numPredecessors() const
Node(Node &&other) noexcept
const N & node(size_t index) const
void forEachNode(F &&func) const
void forEachNode(F &&func)
SubgraphT(SubgraphT< N > &&other) noexcept