Initial commit: Final state of the master project
Research/scene/PoolBuilder/AdaptivePointerPoolBuilder.cpp (337 lines, Normal file)
@@ -0,0 +1,337 @@
#include "AdaptivePointerPoolBuilder.h"
#include <algorithm>
#include "../../inc/tbb/parallel_sort.h"

std::string AdaptivePointerPoolBuilder::GetFullFileName(const std::string& fileName) const
{
    std::string maskBitSizes;
    for (unsigned8 maskBitSize : mMaskBits)
        maskBitSizes += std::to_string(maskBitSize);
    return fileName + ".a" + (mUseLookupTable ? "l" : "d") + std::to_string(mMaskSize) + maskBitSizes + ".pool";
}

size_t GetIndexibleNodes(unsigned8 byteCount, unsigned8 maskSize)
{
    return BitHelper::Exp2(byteCount * 8 - maskSize);
}

size_t AdaptivePointerPoolBuilder::GetMaxLookupTableSize() const
{
    size_t maxSize = 0;
    // The max lookup table size is the maximum size we can index using the consecutive pointer sizes (based on pointer bits):
    for (unsigned8 i = 0; i < BitHelper::Exp2(mMaskSize) - 1; i++)
        maxSize += GetIndexibleNodes(mMaskBits[i], mMaskSize);
    return maxSize;
}

// Given an index in the lookup table, calculates the size of a pointer to that index (taking the mask size into account)
unsigned8 AdaptivePointerPoolBuilder::GetMinimumSizeOfPointer(const unsigned32& pointer) const {
    size_t rangeStart = 0;
    size_t rangeEnd = 0;
    for (unsigned8 i = 0; i < BitHelper::Exp2(mMaskSize) - 1; i++)
    {
        unsigned8 size = mMaskBits[i];
        rangeEnd = rangeStart + GetIndexibleNodes(size, mMaskSize);
        if (pointer >= rangeStart && pointer < rangeEnd)
            return size;
        rangeStart = rangeEnd;
    }
    return 4;
}

unsigned32 AdaptivePointerPoolBuilder::GetShortenedPointerTo(const unsigned32& pointer, unsigned8& mask) const
{
    size_t rangeStart = 0;
    size_t rangeEnd = 0;
    for (unsigned8 i = 0; i < BitHelper::Exp2(mMaskSize) - 1; i++)
    {
        mask = i;
        unsigned8 size = mMaskBits[i];
        rangeEnd = rangeStart + GetIndexibleNodes(size, mMaskSize);
        if (pointer >= rangeStart && pointer < rangeEnd) break;
        rangeStart = rangeEnd;
    }
    if (pointer >= rangeEnd)
        mask = BitHelper::GetLSMask<unsigned8>(0, mMaskSize);
    return pointer - (unsigned32)rangeStart;
}
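
// Illustrative example (added for clarity, not part of the original logic): with mMaskSize = 2 and
// mMaskBits = {1, 2, 3}, GetIndexibleNodes yields sections of 2^6 = 64, 2^14 = 16384 and 2^22 = 4194304
// entries. An index of 100 falls in the second section, so GetMinimumSizeOfPointer(100) returns 2 and
// GetShortenedPointerTo(100, mask) returns 100 - 64 = 36 with mask set to 1. Indices past the last section
// presumably fall through to the full 4-byte case, with the mask set to all ones via BitHelper::GetLSMask.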

void AdaptivePointerPoolBuilder::InitBuild(const BaseTree* tree)
{
    if (!mBuildInitiated)
        CalculateEverything(tree, mPointerSizes, mPointerSizePerLevel, mNodeLevelOffsets, mParentCounts, mLookupTableNodesPerLevel, mNodeWithinLookupTable);
    mBuildInitiated = true;
}

void AdaptivePointerPoolBuilder::CalculateEverything(const BaseTree* tree, std::vector<unsigned8>& pointerSizes, std::vector<unsigned8>& pointerSizesPerLevel, std::vector<unsigned32>& levelOffsets, std::vector<size_t>& parentCounts, std::vector<std::vector<unsigned32>>& lookupTableNodesPerLevel, BoolArray& nodeWithinLookupTable) const
{
    // Calculate the pointer sizes and which nodes are in the lookup table.
    unsigned8 depth = tree->GetMaxLevel();
    unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();

    // Find out how many parents all nodes have
    parentCounts = tree->GetParentCounts();

    // Sort the nodes on parent counts per level.
    // Do this by sorting a map. This maps indices in the GPU pool to indices in the tree.
    // e.g. to find the node at position 6 in the GPU pool, use tree->GetNode(nodeMap[6]).
    std::vector<unsigned32> nodeMap(nodeCount);
    tbb::parallel_for((unsigned32)0, nodeCount, [&](const unsigned32& i){ nodeMap[i] = i; });
    OrderNodes(tree, nodeMap, parentCounts);

    // Now find the indices where the levels start
    levelOffsets = std::vector<unsigned32>(depth + 2);
    unsigned8 curLevel = 255;
    for (unsigned32 i = 0; i < nodeCount; i++)
    {
        const Node* node = tree->GetNode(nodeMap[i]);
        if (node->GetLevel() != curLevel) levelOffsets[++curLevel] = i;
    }
    levelOffsets[depth + 1] = (unsigned32)tree->GetNodeCount();

    // Go bottom-up through the tree. For each level, calculate the size of the level and the size of normal (byte) pointers.
    // Also calculate the size of the lookup table and the index until which nodes are put in the lookup table.
    // Store the index of the last lookup table node to decide the sizes of the lookup table pointers
    if (mUseLookupTable)
    {
        lookupTableNodesPerLevel = std::vector<std::vector<unsigned32>>(depth + 1);
        nodeWithinLookupTable = BoolArray(nodeCount);
    }
    std::vector<unsigned32> nodePointerWithinLevel;
    if (!mUseLookupTable) nodePointerWithinLevel.resize(nodeCount);

    pointerSizesPerLevel = std::vector<unsigned8>(depth + 1);
    pointerSizes = std::vector<unsigned8>(nodeCount);

    size_t maxLookupTableSize = GetMaxLookupTableSize();

    for (unsigned8 level = depth + 1; level-- > 0;)
    {
        unsigned32 levelStart = levelOffsets[level];
        unsigned32 levelEnd = levelOffsets[level + 1];
        size_t levelSize = 0;
        std::vector<unsigned32> lookupTable;
        // Calculate the size of this level and which nodes are put in the lookup table
        for (unsigned32 i = levelStart; i < levelEnd; i++)
        {
            unsigned32 nodeId = nodeMap[i];
            if (!mUseLookupTable) nodePointerWithinLevel[nodeId] = (unsigned32)levelSize;
            // If the node has more than 2 parents, it's worth putting in the lookup table
            if (mUseLookupTable && parentCounts[nodeId] > 2 && i - levelStart < maxLookupTableSize)
            {
                lookupTable.push_back(nodeId);
                nodeWithinLookupTable.Set(nodeId, true);
            }

            const Node* node = tree->GetNode(nodeId);
            unsigned32* children = node->GetChildren();
            size_t nodeSize = GetBaseNodeSize(tree, nodeId);
            for (ChildIndex c = 0; c < node->GetChildCount(); c++)
                nodeSize += pointerSizes[children[c]];
            levelSize += nodeSize;
        }
        // Now that we know the level size and the lookup table layout, we can calculate the size of pointers to nodes in this level
        unsigned8 levelPointerSize = BitHelper::RoundToBytes(BitHelper::Log2Ceil(levelSize) + mMaskSize) / 8;
        if (levelPointerSize == 0) levelPointerSize = 1; // Pointers should be at least 1 byte
        pointerSizesPerLevel[level] = levelPointerSize;

        if (mUseLookupTable && levelPointerSize <= 1) // We can't save space with a lookup table
        {
            lookupTable.clear();
            for (unsigned32 i = levelStart; i < levelEnd; i++) nodeWithinLookupTable.Set(nodeMap[i], false);
        }

        //// Hack: put everything in the lookup table.
        //nodeWithinLookupTable.SetRange(levelStart, levelEnd, true);
        //lookupTable.clear();
        //for (unsigned32 i = levelStart; i < levelEnd; i++) lookupTable.push_back(nodeMap[i]);

        for (unsigned32 i = levelStart; i < levelEnd; i++)
        {
            unsigned32 nodeId = nodeMap[i];
            if (mUseLookupTable)
            {
                if (nodeWithinLookupTable[nodeId])
                    // Since the nodes are inserted in the lookup table in order, we can calculate a node's index in the lookup table
                    // using i - levelStart.
                    pointerSizes[nodeId] = GetMinimumSizeOfPointer(i - levelStart);
                else
                    pointerSizes[nodeId] = levelPointerSize;
            }
            else
            {
                unsigned8 minimumPointerSize = GetMinimumSizeOfPointer(nodePointerWithinLevel[nodeId]);
                if (minimumPointerSize < BitHelper::Exp2(mMaskSize)) // Only use a smaller pointer if the size of the pointer can be indicated by the mask
                    pointerSizes[nodeId] = minimumPointerSize;
                else
                    pointerSizes[nodeId] = levelPointerSize;
            }
        }
        if (mUseLookupTable)
            lookupTableNodesPerLevel[level] = lookupTable;
    }
}


void AdaptivePointerPoolBuilder::OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeMap) const
{
    OrderNodes(tree, nodeMap, mParentCounts);
}

void AdaptivePointerPoolBuilder::OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeMap, const std::vector<size_t>& parentCounts)
{
    // First order on level (asc), then on number of parents (desc), so that the most used nodes have the smallest pointers
    tbb::parallel_sort(nodeMap.begin(), nodeMap.end(), [&](const unsigned32& i1, const unsigned32& i2)
    {
        bool res = false;
        unsigned8 lvl1 = tree->GetNode(i1)->GetLevel();
        unsigned8 lvl2 = tree->GetNode(i2)->GetLevel();
        if (lvl1 != lvl2) res = lvl1 < lvl2;
        else if (parentCounts[i1] != parentCounts[i2]) res = (parentCounts[i1] > parentCounts[i2]);
        // If the level and number of parents is the same, then, for consistency, order on nodeID.
        else res = i1 < i2;
        return res;
    });
}

void AdaptivePointerPoolBuilder::FinishBuild(const BaseTree* tree)
{
    ClearVariables();
    mBuildInitiated = false;
}

unsigned8 AdaptivePointerPoolBuilder::GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeIndex) const
{
    return mPointerSizes[nodeIndex];
}

size_t AdaptivePointerPoolBuilder::GetPoolInfoSize(const BaseTree* tree) const
{
    unsigned8 depth = tree->GetMaxLevel();
    // Start with a list of level offsets (4 bytes each, one per level)
    return 4 * (depth + 1) // Size of the level offsets
        + (depth + 1) // And the sizes of full pointers per level
        + 1 // 1 byte to indicate the pointer sizes that belong to each mask (e.g. 00 -> 1, 01 -> 2, 10 -> 3)
        + (mUseLookupTable ? (4 * depth) : 0); // Size of the pointers to the starts of the lookup tables.
    // Note that the first level doesn't have a lookup table (as there are no pointers to the root).
}

std::vector<unsigned8> AdaptivePointerPoolBuilder::GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder)
{
    std::vector<unsigned8> res(GetPoolInfoSize(tree));
    unsigned8 depth = tree->GetMaxLevel();

    // Calculate the pointer level offsets:
    mPointerLevelOffsets = std::vector<unsigned32>(depth + 1);
    for (unsigned8 level = 0; level <= depth; level++) mPointerLevelOffsets[level] = (unsigned32)nodePointers[nodeOrder[mNodeLevelOffsets[level]]];

    // Write the level offsets
    for (unsigned8 level = 0; level <= depth; level++)
        BitHelper::SplitInBytesAndMove(mPointerLevelOffsets[level], res, level * 4, 4);

    // Write the pointer sizes per level (for full pointers)
    for (unsigned8 level = 0; level <= depth; level++)
        res[(depth + 1) * 4 + level] = mPointerSizePerLevel[level];

    // Write the byte indicating the pointer sizes per mask value
    unsigned8 maskBitsDescr = 0;
    for (unsigned8 i = 0; i < BitHelper::Exp2(mMaskSize) - 1; i++)
        maskBitsDescr |= (mMaskBits[i] - 1) << (i * 2);
    res[(depth + 1) * (4 + 1)] = maskBitsDescr;

    // Write the pointers to the lookup table starts per level
    if (mUseLookupTable)
    {
        size_t curIndex = GetAdditionalPoolInfoStart(tree); // curIndex is the start of the lookup tables
        for (unsigned8 level = 1; level <= depth; level++)
        {
            BitHelper::SplitInBytesAndMove(curIndex, res, (depth + 1) * (4 + 1) + 1 + (level - 1) * 4, 4);
            curIndex += mLookupTableNodesPerLevel[level].size() * mPointerSizePerLevel[level];
        }
    }

    return res;
}
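
// Pool info layout produced above (summary added for clarity): (depth + 1) 4-byte level offsets,
// then (depth + 1) bytes with the full pointer size of each level, then the 1-byte mask descriptor,
// then (only when the lookup table is used) depth 4-byte pointers to the per-level lookup tables.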

size_t AdaptivePointerPoolBuilder::GetAdditionalPoolInfoSize(const BaseTree* tree) const
{
    // Calculate the size of the lookup tables
    if (mUseLookupTable)
    {
        size_t res = 0;
        for (unsigned8 level = 1; level <= tree->GetMaxLevel(); level++)
            res += mLookupTableNodesPerLevel[level].size() * mPointerSizePerLevel[level];
        return res;
    }
    else return 0;
}

std::vector<unsigned8> AdaptivePointerPoolBuilder::GetAdditionalPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder)
{
    std::vector<unsigned8> res(GetAdditionalPoolInfoSize(tree));
    if (mUseLookupTable)
    {
        unsigned8 depth = tree->GetMaxLevel();

        // Write the lookup tables
        size_t curIndex = 0;
        for (unsigned8 level = 1; level <= depth; level++)
        {
            unsigned8 levelPointerSize = mPointerSizePerLevel[level];
            unsigned32 levelOffset = mPointerLevelOffsets[level];
            std::vector<unsigned32>& levelLookupTableNodes = mLookupTableNodesPerLevel[level];
            for (unsigned32 lookupTableNode : levelLookupTableNodes)
            {
                unsigned32 actualPointer = (unsigned32)nodePointers[lookupTableNode] - levelOffset;
                BitHelper::SplitInBytesAndMove(actualPointer, res, curIndex, levelPointerSize);
                curIndex += levelPointerSize;
            }
        }
    }
    return res;
}

std::vector<unsigned8> AdaptivePointerPoolBuilder::WrapPointer(const BaseTree* tree, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const
{
    const Node* node = tree->GetNode(nodeIndex);
    unsigned8 level = node->GetLevel();
    unsigned8 pointerSize = mPointerSizes[nodeIndex];
    unsigned8 bitPointerSize = pointerSize * 8;
    unsigned32 mask = (unsigned32)BitHelper::Exp2(mMaskSize) - 1;
    unsigned32 actualPointer = pointer - mPointerLevelOffsets[level];
    if (mUseLookupTable)
    {
        if (mNodeWithinLookupTable[nodeIndex])
        {
            // Find the index of the node within the lookup table
            size_t lookupTablePointer = indexInPool - mNodeLevelOffsets[level];
            assert(mLookupTableNodesPerLevel[level][lookupTablePointer] == nodeIndex);
            unsigned8 sectionMask;
            actualPointer = GetShortenedPointerTo((unsigned32)lookupTablePointer, sectionMask);
            mask = sectionMask;
        }
    }
    else
    {
        unsigned8 sectionMask;
        actualPointer = GetShortenedPointerTo(actualPointer, sectionMask);
        mask = sectionMask;
    }
    assert(actualPointer < BitHelper::Exp2(bitPointerSize - mMaskSize));
    assert(mask < BitHelper::Exp2(mMaskSize));
    unsigned32 pointerWithMask = actualPointer | (mask << (bitPointerSize - mMaskSize));
    return BitHelper::SplitInBytes(pointerWithMask, pointerSize);
}
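
// Illustrative example (added for clarity, not in the original source): with mMaskSize = 2, a node whose
// pointer was reduced to 2 bytes stores the 2-bit mask in the two most significant bits of the 16-bit value,
// i.e. pointerWithMask = actualPointer | (mask << 14), leaving 14 bits for the level-relative offset.
// BitHelper::SplitInBytes then emits those 2 bytes into the pool.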

void AdaptivePointerPoolBuilder::ClearVariables()
{
    mPointerSizes.clear(); mPointerSizes.shrink_to_fit();
    mPointerSizePerLevel.clear(); mPointerSizePerLevel.shrink_to_fit();
    mPointerLevelOffsets.clear(); mPointerLevelOffsets.shrink_to_fit();
    mNodeLevelOffsets.clear(); mNodeLevelOffsets.shrink_to_fit();
    mParentCounts.clear(); mParentCounts.shrink_to_fit();
    mLookupTableNodesPerLevel.clear(); mLookupTableNodesPerLevel.shrink_to_fit();
    mNodeWithinLookupTable.Resize(0);
}
Research/scene/PoolBuilder/AdaptivePointerPoolBuilder.h (65 lines, Normal file)
@@ -0,0 +1,65 @@
#pragma once
#include "BaseTreePoolBuilder.h"
#include "../../core/Util/BoolArray.h"

class AdaptivePointerPoolBuilder : public BaseTreePoolBuilder
{
public:
    AdaptivePointerPoolBuilder(bool useLookupTable, unsigned8 maskSize, unsigned8 size1 = 1, unsigned8 size2 = 2, unsigned8 size3 = 3) : BaseTreePoolBuilder(), mMaskSize(maskSize), mMaskBits(std::vector<unsigned8>{size1, size2, size3}), mUseLookupTable(useLookupTable)
    {
        assert(maskSize == 1 || maskSize == 2);
        assert(size1 <= 4 && size2 <= 4 && size3 <= 4);
    }
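
    // Example usage (illustrative only): a builder with a lookup table, a 2-bit mask and the default
    // 1/2/3-byte pointer sections could be created as
    //     AdaptivePointerPoolBuilder builder(true, 2);
    // while AdaptivePointerPoolBuilder builder(false, 2, 1, 1, 2); would use the "1, 1, 2" layout
    // described at mMaskBits below.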

    virtual ~AdaptivePointerPoolBuilder() override {}

    std::string GetFullFileName(const std::string& fileName) const override;
protected:
    void InitBuild(const BaseTree* tree) override;
    void FinishBuild(const BaseTree* tree) override;
    bool WordAligned() const override { return false; }

    unsigned8 GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeIndex) const override;
    std::vector<unsigned8> WrapPointer(const BaseTree* tree, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const override;

    size_t GetPoolInfoSize(const BaseTree* tree) const override;
    std::vector<unsigned8> GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) override;

    size_t GetAdditionalPoolInfoSize(const BaseTree* tree) const override;
    std::vector<unsigned8> GetAdditionalPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) override;

    void OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder) const override;
    static void OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder, const std::vector<size_t>& parentCounts);

    void ClearVariables();

    size_t GetMaxLookupTableSize() const;
    unsigned8 GetMinimumSizeOfPointer(const unsigned32& index) const;
    // Calculates in which "section" this pointer is. Maps the pointer to a pointer within that section and returns the mask.
    unsigned32 GetShortenedPointerTo(const unsigned32& pointer, unsigned8& mask) const;
    void CalculateEverything(const BaseTree* tree, std::vector<unsigned8>& pointerSizes, std::vector<unsigned8>& pointerSizesPerLevel, std::vector<unsigned32>& levelOffsets, std::vector<size_t>& parentCounts, std::vector<std::vector<unsigned32>>& lookupTableNodesPerLevel, BoolArray& nodeWithinLookupTable) const;

    // Size of the mask used to indicate the size of pointers
    const unsigned8 mMaskSize;
    // Indicates how many bytes each value of the mask selects. Standard is "1, 2, 3", but
    // one could imagine configurations such as "1, 1, 2", where 00 indicates the first 64 nodes, 01 the second 64 nodes, and 10 the next 16K nodes.
    const std::vector<unsigned8> mMaskBits;
    const bool mUseLookupTable;

    // Variables used during the current build

    // Pointer sizes for each individual node within the tree
    std::vector<unsigned8> mPointerSizes;
    // Pointer sizes for direct (byte precision) pointers per level
    std::vector<unsigned8> mPointerSizePerLevel;
    // Level offsets as the index of the first node in each level
    std::vector<unsigned32> mNodeLevelOffsets;
    std::vector<unsigned32> mPointerLevelOffsets;
    //std::vector<unsigned32> mLevelOffsets;
    std::vector<size_t> mParentCounts;
    std::vector<std::vector<unsigned32>> mLookupTableNodesPerLevel;
    BoolArray mNodeWithinLookupTable;

    bool mBuildInitiated = false;
};
Research/scene/PoolBuilder/BasePoolBuilder.h (66 lines, Normal file)
@@ -0,0 +1,66 @@
#pragma once
#include <vector>
#include <stdio.h>
#include <fstream>

#include "../../core/Defines.h"

#include "../../core/Serializer.h"

// Pool builder class that can be used to build a pool for a specific kind of tree. The template parameter indicates the tree type.
template<class T>
class BasePoolBuilder
{
public:
    virtual ~BasePoolBuilder() {}

    virtual size_t GetPoolSize(const T* tree) = 0;
    virtual bool BuildPool(const T* tree, std::vector<unsigned8>& pool) = 0;
    virtual bool VerifyPool(std::vector<unsigned8>& pool, const unsigned8& depth) const = 0;
    virtual std::string GetFullFileName(const std::string& fileName) const = 0;

    virtual bool ReadPool(const std::string& fileName, std::vector<unsigned8>& pool) const
    {
        std::string binFileName = GetFullFileName(fileName);
        std::ifstream nodePoolInFile(binFileName, std::ios::binary);

        if (nodePoolInFile.good()) {

            // Destroy whole current node pool
            Serializer<std::vector<unsigned8>, unsigned64>::Deserialize(pool, nodePoolInFile);
            nodePoolInFile.close();
            return true;
        }
        return false;
    }

    bool WritePool(const std::string& fileName, const std::vector<unsigned8>& pool) const
    {
        std::string binFileName = GetFullFileName(fileName);

        if (!pool.empty()) {
            std::ofstream nodePoolOutFile(binFileName, std::ios::binary);
            Serializer<std::vector<unsigned8>, unsigned64>::Serialize(pool, nodePoolOutFile);
            nodePoolOutFile.close();
            return true;
        }
        return false;
    }

    bool BuildOrReadPool(const T* tree, const std::string& fileName, std::vector<unsigned8>& pool)
    {
        if (ReadPool(fileName, pool)) return true;
        bool res = BuildPool(tree, pool);
        if (res) WritePool(fileName, pool);
        return res;
    }
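
    // Typical call sequence (illustrative only; "scene" and depth are placeholders):
    //     std::vector<unsigned8> pool;
    //     builder.BuildOrReadPool(tree, "scene", pool);        // builds, or reads the cached ".pool" file
    //     bool ok = builder.VerifyCachedPool("scene", depth);  // re-reads the cache and runs VerifyPool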

    bool VerifyCachedPool(const std::string& fileName, const unsigned8& depth) const
    {
        std::vector<unsigned8> pool;
        if (ReadPool(fileName, pool))
            return VerifyPool(pool, depth);
        return false;
    }
};
Research/scene/PoolBuilder/BaseTreePoolBuilder.cpp (262 lines, Normal file)
@@ -0,0 +1,262 @@
#include "BaseTreePoolBuilder.h"
#include <algorithm>
#include "../../inc/tbb/parallel_sort.h"

//************************************
// Calculates the minimum size of the node pool texture, so that all nodes + pointers fit
//************************************
size_t BaseTreePoolBuilder::GetPoolSize(const BaseTree* tree)
{
    return GetMinimumNodePoolTexelCount(tree);
}

size_t BaseTreePoolBuilder::GetMinimumNodePoolTexelCount(const BaseTree* tree)
{
    assert(tree != NULL);
    if (!mIsBuilding)
        InitBuild(tree);
    size_t texelCount = GetTreeInfoBytesSize(tree);
    for (unsigned32 i = 0; i < (unsigned32)tree->GetNodeCount(); i++)
        texelCount += GetNodeSize(tree, i);
    if (!mIsBuilding)
        FinishBuild(tree);
    return texelCount;
}

unsigned32 BaseTreePoolBuilder::GetTreeInfoBytesSize(const BaseTree* tree) const
{
    assert(tree != NULL);
    bool hasAdditionalPointerBytes = HasAdditionalBytesPerPointer(tree);
    bool hasAdditionalBytesPerNode = HasAdditionalBytesPerNode(tree);

    unsigned8 depth = tree->GetMaxLevel();
    size_t treeInfoSize = (hasAdditionalPointerBytes ? (depth + 1) : 0) // additional pointer sizes (if not 0, 8 bits = 1 byte per level)
        + (hasAdditionalBytesPerNode ? (depth + 1) : 0) // and additional node sizes (if not 0, 8 bits = 1 byte per level)
        + tree->GetAdditionalTreeInfoSize()
        + (unsigned32)GetPoolInfoSize(tree)
        + (unsigned32)GetAdditionalPoolInfoSize(tree);
    if (WordAligned()) RoundToWords(treeInfoSize);
    return (unsigned32)treeInfoSize;
}

//************************************
// Inserts all nodes into the final node pool and updates the pointers
//************************************
bool BaseTreePoolBuilder::BuildPool(const BaseTree* tree, std::vector<unsigned8>& pool) {
    if (tree == NULL) return false;

    // Notify subclasses that the build is initiating (so they can do pre-calculations)
    InitBuild(tree);
    mIsBuilding = true;

    // Initialize the final node pool
    size_t poolSize = GetMinimumNodePoolTexelCount(tree);
    pool.resize(poolSize, 0);

    // Get information about the node sizes per level

    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
    std::vector<unsigned8> additionalBytesPerNode = tree->GetAdditionalBytesPerNode();
    bool hasAdditionalPointerBytes = HasAdditionalBytesPerPointer(tree);
    bool hasAdditionalBytesPerNode = HasAdditionalBytesPerNode(tree);
    bool lastChildHasAdditionalBytesPerPointer = LastChildHasAdditionalBytesPerPointer(tree);

    // Find an ordering of the nodes such that all children of a node appear after that node in memory.
    unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();
    unsigned8 depth = tree->GetMaxLevel();

    std::vector<unsigned32> nodeMap(nodeCount);
    for (unsigned32 i = 0; i < nodeCount; i++) nodeMap[i] = i;
    OrderNodes(tree, nodeMap);
    std::vector<unsigned32> reverseNodeMap(nodeCount);
    for (unsigned32 i = 0; i < nodeCount; i++) reverseNodeMap[nodeMap[i]] = i;

    // Calculate all node indices beforehand (also makes calculating the level offsets easy)
    std::vector<size_t> nodePointers(nodeCount);
    size_t treeInfoSize = GetTreeInfoBytesSize(tree);
    size_t curIndex = treeInfoSize;
    for (unsigned32 i = 0; i < nodeCount; i++)
    {
        unsigned32 nodeId = nodeMap[i];
        nodePointers[nodeId] = curIndex;
        curIndex += GetNodeSize(tree, nodeId);
    }

    // Start building the pool.
    curIndex = 0;
    // First the (subclass specific) tree/pool information
    size_t poolInfoSize = GetPoolInfoSize(tree);
    if (poolInfoSize != 0)
    {
        std::vector<unsigned8> poolInfo = GetPoolInfo(tree, nodePointers, nodeMap);
        assert(poolInfo.size() == poolInfoSize);
        std::move(poolInfo.begin(), poolInfo.end(), pool.begin());
        curIndex += poolInfoSize;
    }

    // Write the additional node bytes (size per level)
    if (hasAdditionalBytesPerNode)
        for (unsigned8 level = 0; level <= depth; level++)
            pool[curIndex++] = additionalBytesPerNode[level];

    // Write the additional pointer bytes (size per level)
    if (hasAdditionalPointerBytes)
        for (unsigned8 level = 0; level <= depth; level++)
            pool[curIndex++] = additionalBytesPerPointer[level];

    // Write the additional tree info from the tree itself (MultiRoot uses this for root pointers)
    // TODO: Find a better way to do this, don't let the tree itself determine part of how the pool looks
    assert(GetAdditionalTreeInfoStart(tree) == curIndex);
    std::vector<unsigned8> additionalTreeInfo = tree->GetAdditionalTreeInfo(nodePointers);
    assert(additionalTreeInfo.size() == tree->GetAdditionalTreeInfoSize());
    std::move(additionalTreeInfo.begin(), additionalTreeInfo.end(), pool.begin() + curIndex);
    curIndex += additionalTreeInfo.size();

    // Write additional pool info (e.g. lookup tables)
    assert(GetAdditionalPoolInfoStart(tree) == curIndex);
    size_t additionalPoolInfoSize = GetAdditionalPoolInfoSize(tree);
    if (additionalPoolInfoSize != 0)
    {
        std::vector<unsigned8> additionalPoolInfo = GetAdditionalPoolInfo(tree, nodePointers, nodeMap);
        assert(additionalPoolInfo.size() == additionalPoolInfoSize);
        std::move(additionalPoolInfo.begin(), additionalPoolInfo.end(), pool.begin() + curIndex);
        curIndex += additionalPoolInfoSize;
    }

    // Assert that the nodes start at the expected position
    if (WordAligned()) RoundToWords(curIndex);
    assert(curIndex == nodePointers[nodeMap[0]]);

    // Write the nodes
#ifdef _DEBUG
    for (unsigned32 i = 0; i < nodeCount; i++)
#else
    tbb::parallel_for((unsigned32)0, nodeCount, [&](const unsigned32& i)
#endif

    {
        unsigned32 nodeId = nodeMap[i];
        std::vector<unsigned8> bytesForNode = GetBytesForNode(tree, nodeId, nodePointers, reverseNodeMap, additionalBytesPerNode, additionalBytesPerPointer);
        assert(bytesForNode.size() == GetNodeSize(tree, nodeId));
        size_t nodeIndex = nodePointers[nodeId];
        std::move(bytesForNode.begin(), bytesForNode.end(), pool.begin() + nodeIndex);
#ifdef _DEBUG
    }
#else
    });
#endif
    FinishBuild(tree);
    mIsBuilding = false;
    return true;
}

size_t BaseTreePoolBuilder::GetAdditionalTreeInfoStart(const BaseTree* tree) const
{
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
    std::vector<unsigned8> additionalBytesPerNode = tree->GetAdditionalBytesPerNode();
    bool hasAdditionalPointerBytes = false; for (auto abpp : additionalBytesPerPointer) if (abpp != 0) hasAdditionalPointerBytes = true;
    bool hasAdditionalBytesPerNode = false; for (auto abpn : additionalBytesPerNode) if (abpn != 0) hasAdditionalBytesPerNode = true;
    unsigned8 depth = tree->GetMaxLevel();
    size_t poolInfoSize = GetPoolInfoSize(tree);
    size_t additionalInfoSize = ((hasAdditionalBytesPerNode ? 1 : 0) + (hasAdditionalPointerBytes ? 1 : 0)) * (depth + 1);
    return poolInfoSize + additionalInfoSize;
}

size_t BaseTreePoolBuilder::GetAdditionalPoolInfoStart(const BaseTree* tree) const
{
    return GetAdditionalTreeInfoStart(tree) + tree->GetAdditionalTreeInfoSize();
}

void BaseTreePoolBuilder::OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder) const
{
    tbb::parallel_sort(nodeOrder.begin(), nodeOrder.end(), [&](const unsigned32& i1, const unsigned32& i2)
    {
        const Node* a = tree->GetNode(i1);
        const Node* b = tree->GetNode(i2);
        return a->GetLevel() < b->GetLevel();
    });
}

std::vector<unsigned8> BaseTreePoolBuilder::GetBytesForNode(const BaseTree* tree, const unsigned32& nodeId, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& reverseNodeMap,
    const std::vector<unsigned8>& fullAdditionalBytesPerNode, const std::vector<unsigned8>& fullAdditionalBytesPerPointer) const
{
    const Node* node = tree->GetNode(nodeId);
    unsigned8 childCount = node->GetChildCount();
    unsigned32* children = node->GetChildren();
    unsigned8 level = node->GetLevel();
    unsigned8 additionalBytesPerNode = fullAdditionalBytesPerNode[level];
    unsigned8 additionalBytesPerPointer = fullAdditionalBytesPerPointer[level];

    size_t nodeSize = GetNodeSize(tree, nodeId);
    std::vector<unsigned8> bytes(nodeSize);
    size_t curIndex = 0;

    // Add the childmask
    bytes[0] = node->GetChildmask().mask;
    curIndex++;

    if (GetAdditionalPoolInfoForNodeSize(tree, nodeId) != 0)
    {
        auto additionalPoolInfoForNode = GetAdditionalPoolInfoForNode(tree, nodeId, reverseNodeMap[nodeId]);
        std::move(additionalPoolInfoForNode.begin(), additionalPoolInfoForNode.end(), bytes.begin() + curIndex);
        assert(additionalPoolInfoForNode.size() == GetAdditionalPoolInfoForNodeSize(tree, nodeId));
        curIndex += additionalPoolInfoForNode.size();
    }

    // Add the additional node bytes
    if (additionalBytesPerNode != 0)
    {
        std::vector<unsigned8> additionalNodeBytes = tree->GetAdditionalNodeBytes(node);
        assert(additionalNodeBytes.size() == additionalBytesPerNode);
        std::move(additionalNodeBytes.begin(), additionalNodeBytes.end(), bytes.begin() + curIndex);
        curIndex += additionalNodeBytes.size();
    }
    if (WordAligned()) RoundToWords(curIndex);

    // Write the node pointers
    for (ChildIndex c = 0; c < childCount; c++)
    {
        unsigned32 childNodeIndex = children[c];
        size_t pointer = nodePointers[childNodeIndex];
        std::vector<unsigned8> pointerBytes = WrapPointer(tree, childNodeIndex, reverseNodeMap[childNodeIndex], (unsigned32)pointer);
        std::move(pointerBytes.begin(), pointerBytes.end(), bytes.begin() + curIndex);
        curIndex += pointerBytes.size();
        if (WordAligned()) RoundToWords(curIndex);
    }

    // Followed by the additional pointer info
    if (additionalBytesPerPointer != 0)
    {
        ChildIndex i = 0;
        for (ChildIndex c = 0; c < 8; c++)
        {
            if (node->HasChild(c) && (i < childCount - 1 || LastChildHasAdditionalBytesPerPointer(tree)))
            {
                i++;
                std::vector<unsigned8> additionalBytesForPointer = tree->GetAdditionalPointerBytes(node, c);
                if (WordAligned())
                {
                    curIndex += additionalBytesForPointer.size();
                    RoundToWords(curIndex);
                    curIndex -= additionalBytesForPointer.size();
                }
                std::move(additionalBytesForPointer.begin(), additionalBytesForPointer.end(), bytes.begin() + curIndex);
                curIndex += additionalBytesForPointer.size();
                if (WordAligned()) RoundToWords(curIndex);
            }
        }
    }
    return bytes;
}
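
// Resulting per-node byte layout, as assembled above (summary added for clarity):
//   [childmask (1 byte)] [additional pool info for node] [additional node bytes]
//   [one wrapped pointer per child] [additional bytes per pointer, except possibly for the last child],
// with the groups padded to word boundaries when WordAligned() is true.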

bool BaseTreePoolBuilder::VerifyPool(std::vector<unsigned8>& pool, const unsigned8& treeDepth) const
{
    // Verify that the level offsets are not bigger than the size of the pool
    for (unsigned8 level = 0; level <= treeDepth; level++)
    {
        unsigned32 levelOffset = 0;
        BitHelper::JoinBytes(pool, levelOffset, level * 4);
        if (levelOffset > pool.size()) return false;
    }
    return true;
}
Research/scene/PoolBuilder/BaseTreePoolBuilder.h (112 lines, Normal file)
@@ -0,0 +1,112 @@
#pragma once
#include <vector>
#include "BasePoolBuilder.h"
#include "../Octree/BaseTree.h"

// Pool builder class that can be used to build a pool for a specific kind of tree. Constructor should indicate the tree type.
class BaseTreePoolBuilder : public BasePoolBuilder<BaseTree>
{
public:
    BaseTreePoolBuilder() { mIsBuilding = false; }
    virtual ~BaseTreePoolBuilder() override {}

    size_t GetPoolSize(const BaseTree* tree) override;
    bool BuildPool(const BaseTree* tree, std::vector<unsigned8>& pool) override;
    bool VerifyPool(std::vector<unsigned8>& pool, const unsigned8& depth) const override;

    size_t GetMinimumNodePoolTexelCount(const BaseTree* tree);
    unsigned32 GetTreeInfoBytesSize(const BaseTree* tree) const;

    static const size_t WORD_SIZE = 4;
protected:
    inline static void RoundToWords(size_t& value) { value += (WORD_SIZE - (value % WORD_SIZE)) % WORD_SIZE; }
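    // e.g. RoundToWords rounds 5 up to 8 and leaves 8 unchanged (WORD_SIZE = 4); example added for clarity.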

    // Returns the size of a node without the pointers (but with the additional pointer, node and node pool information if needed)
    inline size_t GetBaseNodeSize(const BaseTree* tree, const unsigned32& nodeId) const
    {
        auto node = tree->GetNode(nodeId);
        unsigned8 level = node->GetLevel();
        size_t additionalPointerBytes = 0;
        if (node->GetChildCount() > 0)
        {
            size_t additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer(level);
            // Word-align additional bytes per pointer
            if (WordAligned()) RoundToWords(additionalBytesPerPointer);
            additionalPointerBytes = (node->GetChildCount() - (LastChildHasAdditionalBytesPerPointer(tree) ? 0 : 1)) * additionalBytesPerPointer;
        }
        size_t basicNodeSize = 1 + tree->GetAdditionalBytesPerNode(level) + GetAdditionalPoolInfoForNodeSize(tree, nodeId);
        if (WordAligned()) RoundToWords(basicNodeSize);
        return additionalPointerBytes + basicNodeSize;
    }

    inline size_t GetNodeSize(const BaseTree* tree, const unsigned32& nodeId) const
    {
        const Node* node = tree->GetNode(nodeId);
        size_t nodeSize = GetBaseNodeSize(tree, nodeId);
        unsigned32* children = node->GetChildren();
        for (ChildIndex c = 0; c < node->GetChildCount(); c++)
        {
            size_t pointerSize = GetBytesPerPointer(tree, children[c]);
            if (WordAligned()) RoundToWords(pointerSize);
            nodeSize += pointerSize;
        }
        return nodeSize;
    }

    inline bool HasAdditionalBytesPerNode(const BaseTree* tree) const
    {
        std::vector<unsigned8> additionalBytesPerNode = tree->GetAdditionalBytesPerNode();
        bool hasAdditionalBytesPerNode = false; for (auto abpn : additionalBytesPerNode) if (abpn != 0) hasAdditionalBytesPerNode = true;
        return hasAdditionalBytesPerNode;
    }

    inline bool HasAdditionalBytesPerPointer(const BaseTree* tree) const
    {
        std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
        bool hasAdditionalBytesPerPointer = false; for (auto abpp : additionalBytesPerPointer) if (abpp != 0) hasAdditionalBytesPerPointer = true;
        return hasAdditionalBytesPerPointer;
    }

    inline bool LastChildHasAdditionalBytesPerPointer(const BaseTree* tree) const
    {
        return tree->LastChildHasAdditionalBytes();
    }

    // Use this method to pre-calculate information needed to build the pool
    virtual void InitBuild(const BaseTree* tree) = 0;
    // Use this method to finalize the build process and clear resources
    virtual void FinishBuild(const BaseTree* tree) = 0;

    // Subclasses can use this to ask for the pool to be word aligned. If a subclass returns true, additional pointer and pool sizes will be rounded to word sizes (i.e. 4 bytes)
    virtual bool WordAligned() const = 0;

    // Should return the number of bytes required for a pointer to the node with nodeIndex.
    virtual unsigned8 GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeIndex) const = 0;

    // Should return the bytes containing a pointer to the node with the given index.
    virtual std::vector<unsigned8> WrapPointer(const BaseTree* root, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const = 0;

    // Pool info required for the tree. The size of this should be determined only by the depth of the tree.
    virtual size_t GetPoolInfoSize(const BaseTree* tree) const = 0;
    // Pool info required for the tree (e.g. level offsets in memory, pointer sizes per level).
    virtual std::vector<unsigned8> GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) = 0;

    // Additional pool info of variable size, such as lookup tables.
    virtual size_t GetAdditionalPoolInfoSize(const BaseTree* tree) const { return 0; }
    // Additional pool info of variable size, such as lookup tables.
    virtual std::vector<unsigned8> GetAdditionalPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) { return std::vector<unsigned8>(); }

    virtual unsigned8 GetAdditionalPoolInfoForNodeSize(const BaseTree* tree, const unsigned32& nodeIndex) const { return 0; }
    virtual std::vector<unsigned8> GetAdditionalPoolInfoForNode(const BaseTree* tree, const unsigned32& nodeIndex, const unsigned32& indexInPool) const { return std::vector<unsigned8>(); }

    // Can be used to change the order of the nodes in memory.
    virtual void OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder) const;

    std::vector<unsigned8> GetBytesForNode(const BaseTree* tree, const unsigned32& nodeId, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& reverseNodeMap,
        const std::vector<unsigned8>& additionalBytesPerNode, const std::vector<unsigned8>& additionalBytesPerPointer) const;

    size_t GetAdditionalTreeInfoStart(const BaseTree* tree) const;
    size_t GetAdditionalPoolInfoStart(const BaseTree* tree) const;

    bool mIsBuilding;
};
Research/scene/PoolBuilder/OriginalPoolBuilder.h (50 lines, Normal file)
@@ -0,0 +1,50 @@
#pragma once
#include "BaseTreePoolBuilder.h"

class OriginalPoolBuilder : public BaseTreePoolBuilder
{

public:
    using BaseTreePoolBuilder::BaseTreePoolBuilder;
    virtual ~OriginalPoolBuilder() override {}

    std::string GetFullFileName(const std::string& fileName) const override
    {
        return fileName + ".o.pool";
    }
protected:
    void InitBuild(const BaseTree* tree) override {}
    void FinishBuild(const BaseTree* tree) override {}
    bool WordAligned() const override { return true; }

    unsigned8 GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeId) const override
    {
        // All pointers are 4 bytes
        return 4;
    }
    std::vector<unsigned8> WrapPointer(const BaseTree* root, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const override
    {
        return BitHelper::SplitInBytes(pointer);
    }

    size_t GetPoolInfoSize(const BaseTree* tree) const override { return 0; }
    std::vector<unsigned8> GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) override { return std::vector<unsigned8>(); }

    unsigned8 GetAdditionalPoolInfoForNodeSize(const BaseTree* tree, const unsigned32& nodeIndex) const override
    {
        const Node* node = tree->GetNode(nodeIndex);
        unsigned8 level = node->GetLevel();
        unsigned8 additionalBytes = tree->GetAdditionalBytesPerNode(level);
        // The original paper uses 32 bits as the atomic unit. Therefore they have 24 unused bits after each childmask. We put these in here for correctness.
        // However, if they are used to store, for example, color information (i.e., additionalBytes != 0), we use them for that.
        if (additionalBytes <= 3) return 3 - additionalBytes;
        else return 0;
    }
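
    // For example (illustrative): with no additional node bytes this pads 3 zero bytes after the childmask,
    // with 2 bytes of per-node data it pads 1 byte, and with 3 or more it pads nothing.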

    std::vector<unsigned8> GetAdditionalPoolInfoForNode(const BaseTree* tree, const unsigned32& nodeIndex, const unsigned32& indexInPool) const override
    {
        return std::vector<unsigned8>(GetAdditionalPoolInfoForNodeSize(tree, nodeIndex), 0);
    }
};
Research/scene/PoolBuilder/StandardPoolBuilder.cpp (135 lines, Normal file)
@@ -0,0 +1,135 @@
#include "StandardPoolBuilder.h"
#include <algorithm>
#include "../../inc/tbb/parallel_sort.h"

std::string StandardPoolBuilder::GetFullFileName(const std::string& fileName) const
{
    return fileName + ".s.pool";
}

std::vector<unsigned8> StandardPoolBuilder::GetPointerSizesPerLevel(const BaseTree* tree) const
{
    unsigned8 depth = tree->GetMaxLevel();
    std::vector<unsigned8> res(depth + 1);
    res[depth] = 0; // Pointers in the leaves have size 0
    for (unsigned8 level = depth; level > 0; level--)
    {
        unsigned8 bytesPerPointer = res[level];
        size_t requiredBytesNextLevel = 0;
        for (unsigned32 i = 0; i < (unsigned32)tree->GetNodeCount(); i++)
        {
            auto node = tree->GetNode(i);
            if (node->GetLevel() == level)
                requiredBytesNextLevel += GetBaseNodeSize(tree, i) + node->GetChildCount() * bytesPerPointer;
        }
        res[level - 1] = BitHelper::RoundToBytes(std::max<unsigned8>(1, BitHelper::Log2Ceil(requiredBytesNextLevel))) / 8;
    }
    for (unsigned8 level = 0; level <= depth; level++)
        printf("Pointers in level %u: %u bytes\n", level, res[level]);
    return res;
}
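
// Worked example (illustrative, assuming BitHelper::Log2Ceil and RoundToBytes behave as their names suggest):
// if the nodes of level 5 take 300000 bytes in total, Log2Ceil(300000) = 19 bits, which RoundToBytes rounds
// up to 24, so pointers into level 5 (stored in level 4 nodes, i.e. res[4]) become 3 bytes wide.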

void StandardPoolBuilder::CalculatePointerSizesPerLevel(const BaseTree* tree)
{
    assert(tree != NULL);
    mPointerSizesPerLevel = GetPointerSizesPerLevel(tree);
}

void StandardPoolBuilder::InitBuild(const BaseTree* tree)
{
    CalculatePointerSizesPerLevel(tree);
    mIsBuildingTree = true;
}
void StandardPoolBuilder::FinishBuild(const BaseTree* tree)
{
    ClearVariables();
    mIsBuildingTree = false;
}

unsigned8 StandardPoolBuilder::GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeIndex) const
{
    const Node* node = tree->GetNode(nodeIndex);
    if (node->GetLevel() == 0) return 0;
    return mPointerSizesPerLevel[node->GetLevel() - 1];
}

size_t StandardPoolBuilder::GetPoolInfoSize(const BaseTree* tree) const
{
    assert(tree != NULL);
    unsigned8 depth = tree->GetMaxLevel();
    return (depth + 1) * 4 // Leave some space for the level offsets (32 bits = 4 bytes per level),
        + (depth + 1); // Space for the size of the node pointers
}

std::vector<unsigned8> StandardPoolBuilder::GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder)
{
    //// Go through the level (in order), keeping track of the indices
    //for (unsigned8 level = 0; level <= depth; level++)
    //{
    //    auto levelStart = levelIndices[level];
    //    auto levelEnd = levelIndices[level + 1];
    //    levelOffsets[level] = (unsigned32)curIndex;
    //    for (auto i = levelStart; i < levelEnd; i++)
    //    {
    //        Node* node = tree->GetNode(nodeOrder[i]);
    //        nodePointers[node->GetIndex()] = curIndex - levelOffsets[level];
    //        assert(level == 0 || nodePointers[node->GetIndex()] < BitHelper::Exp2(bytesPerPointer[level - 1] * 8)); // Assert the index fits
    //        curIndex += 1 + additionalBytesPerNode[level] + node->GetChildCount() * (bytesPerPointer[level] + additionalBytesPerPointer[level]);
    //    }
    //}
    mLevelOffsets = std::vector<unsigned32>(tree->GetMaxLevel() + 1);
    unsigned8 curLevel = 255;
    for (size_t i = 0; i < tree->GetNodeCount(); i++)
    {
        unsigned32 nodeId = nodeOrder[i];
        const Node* node = tree->GetNode(nodeId);
        if (node->GetLevel() != curLevel)
        {
            curLevel++;
            mLevelOffsets[curLevel] = (unsigned32)nodePointers[nodeId];
        }
    }

    std::vector<unsigned8> res(GetPoolInfoSize(tree));
    size_t curIndex = 0;
    // Write the level offsets
    for (unsigned8 level = 0; level <= tree->GetMaxLevel(); level++)
    {
        BitHelper::SplitInBytesAndMove(mLevelOffsets[level], res, curIndex);
        curIndex += 4;
    }

    // Write the number of bytes per pointer
    for (unsigned8 level = 0; level <= tree->GetMaxLevel(); level++)
        res[curIndex++] = mPointerSizesPerLevel[level];
    return res;
}

std::vector<unsigned8> StandardPoolBuilder::WrapPointer(const BaseTree* tree, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const
{
    const Node* node = tree->GetNode(nodeIndex);
    unsigned8 nodeLevel = node->GetLevel();
    unsigned32 withinLevelPointer = pointer - mLevelOffsets[nodeLevel];
    return BitHelper::SplitInBytes(withinLevelPointer, mPointerSizesPerLevel[nodeLevel - 1]);
}

void StandardPoolBuilder::ClearVariables()
{
    mPointerSizesPerLevel.clear();
    mLevelOffsets.clear();
}

//void StandardPoolBuilder::OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder) const
//{
//    std::vector<size_t> parentCounts = tree->GetParentCounts();
//    // First order on level (asc), then on number of parents (desc), so that the most used nodes have the smallest pointers
//    tbb::parallel_sort(nodeOrder.begin(), nodeOrder.end(), [tree, parentCounts](const unsigned32& i1, const unsigned32& i2)
//    {
//        Node* a = tree->GetNode(i1);
//        Node* b = tree->GetNode(i2);
//        if (a->GetLevel() != b->GetLevel()) return a->GetLevel() < b->GetLevel();
//        if (parentCounts[i1] != parentCounts[i2]) return parentCounts[i1] > parentCounts[i2];
//        // If the level and number of parents is the same, then, for consistency, order on nodeID.
//        return i1 < i2;
//    });
//}
Research/scene/PoolBuilder/StandardPoolBuilder.h (35 lines, Normal file)
@@ -0,0 +1,35 @@
#pragma once
#include "BaseTreePoolBuilder.h"

class StandardPoolBuilder : public BaseTreePoolBuilder
{

public:
    using BaseTreePoolBuilder::BaseTreePoolBuilder;
    virtual ~StandardPoolBuilder() override {}

    std::string GetFullFileName(const std::string& fileName) const override;
    std::vector<unsigned8> GetPointerSizesPerLevel(const BaseTree* tree) const;
protected:
    void InitBuild(const BaseTree* tree) override;
    void FinishBuild(const BaseTree* tree) override;
    bool WordAligned() const override { return false; }

    unsigned8 GetBytesPerPointer(const BaseTree* tree, const unsigned32& nodeId) const override;
    std::vector<unsigned8> WrapPointer(const BaseTree* root, const unsigned32& nodeIndex, const unsigned32& indexInPool, const unsigned32& pointer) const override;

    size_t GetPoolInfoSize(const BaseTree* tree) const override;
    std::vector<unsigned8> GetPoolInfo(const BaseTree* tree, const std::vector<size_t>& nodePointers, const std::vector<unsigned32>& nodeOrder) override;

    //void StandardPoolBuilder::OrderNodes(const BaseTree* tree, std::vector<unsigned32>& nodeOrder) const override;

    void CalculatePointerSizesPerLevel(const BaseTree* tree);
    void ClearVariables();
    // Variables used during the current build
    bool mIsBuildingTree = false;
    std::vector<unsigned8> mPointerSizesPerLevel;
    std::vector<unsigned32> mLevelOffsets;

};
Research/scene/PoolBuilder/VirtualNodePoolBuilder.cpp (507 lines, Normal file)
@@ -0,0 +1,507 @@
#include "VirtualNodePoolBuilder.h"
#include <algorithm>
#include "../../inc/tbb/parallel_sort.h"
#include "../../core/Util/BoolArray.h"
#include <numeric>

std::string VirtualNodePoolBuilder::GetFullFileName(const std::string& filename) const
{
    return filename + ".v.pool";
}

size_t VirtualNodePoolBuilder::GetFullNodeSize(const BaseTree* tree, const unsigned8& level, const unsigned8& pointerSize) const
{
    // 1 byte for childmask
    // 1 byte for "virtual mask" (indicating which nodes are virtual)
    // pointerSize bytes for pointer to the first child
    // + Additional node info
    if (level > tree->GetMaxLevel()) return 0;
    return 1 + 1 + pointerSize + tree->GetAdditionalBytesPerNode(level);
}
size_t VirtualNodePoolBuilder::GetVirtualNodeSize(const BaseTree* tree, const unsigned8& level, const unsigned8& pointerSize) const
{
    if (level == 0) return 0;
    return pointerSize;
}

size_t VirtualNodePoolBuilder::GetFullNodeSize(const BaseTree* tree, const unsigned8& level, const std::vector<unsigned8>& pointerSizesPerLevel) const
{
    if (level > tree->GetMaxLevel()) return 0;
    return GetFullNodeSize(tree, level, pointerSizesPerLevel[level]);
}

size_t VirtualNodePoolBuilder::GetVirtualNodeSize(const BaseTree* tree, const unsigned8& level, const std::vector<unsigned8>& pointerSizesPerLevel) const
{
    // pointerSize bytes for pointer to the first child
    return GetVirtualNodeSize(tree, level, pointerSizesPerLevel[level - 1]);
}

size_t VirtualNodePoolBuilder::GetNormalNodeSize(const BaseTree* tree, const unsigned32& nodeId, const std::vector<unsigned8>& pointerSizesPerLevel, std::vector<unsigned8>& additionalPointerInfoSizesPerLevel, const bool& includingAdditionalPointerInfo) const
{
    // 1 byte for childmask
    // Additional node info
    // pointerSize bytes for each pointer needed.
    const Node* node = tree->GetNode(nodeId);
    unsigned8 level = node->GetLevel();
    return 1 + tree->GetAdditionalBytesPerNode(level) + node->GetChildCount() * pointerSizesPerLevel[level] +
        (includingAdditionalPointerInfo ? (additionalPointerInfoSizesPerLevel[level] * node->GetChildCount()) : 0);
}

std::vector<unsigned8> VirtualNodePoolBuilder::CalculatePointerSizesPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode, const std::vector<bool>& useVirtualNodes) const
{
    unsigned8 depth = tree->GetMaxLevel();

    // Calculate some counts per level (needed to calculate the size of each level)
    std::vector<size_t> virtualNodesPerLevel = CalculateVirtualNodesPerLevel(tree, parentsPerNode);
    std::vector<size_t> fullNodesPerLevel = CalculateFullNodesPerLevel(tree);
    std::vector<size_t> pointersToLevel = CalculatePointersToPerLevel(tree);
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();

    // Now bottom-up calculate the pointer sizes required to point to each level
    std::vector<unsigned8> res(depth + 1, 0);
    for (unsigned8 level = depth; level > 0; level--)
    {
        // Keep increasing the pointer size until we can point to all nodes within a level
        bool fits = false;
        while (!fits)
        {
            res[level - 1]++;
            size_t requiredSize = CalculateSizeOfLevel(tree, level, virtualNodesPerLevel[level], fullNodesPerLevel[level],
                pointersToLevel[level], level == depth ? 0 : pointersToLevel[level + 1],
                level == 0 ? 0 : res[level - 1], res[level],
                level == 0 ? 0 : additionalBytesPerPointer[level - 1], additionalBytesPerPointer[level],
                useVirtualNodes[level], level == 0 ? 0 : useVirtualNodes[level - 1]);
            size_t availableSize = BitHelper::Exp2(res[level - 1] * 8); // Available size is how many bytes we can reach with a pointer
            fits = requiredSize < availableSize;
        }
    }
    return res;
}
|
||||
|
||||
std::vector<size_t> VirtualNodePoolBuilder::CalculateVirtualNodesPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const
|
||||
{
|
||||
unsigned8 depth = tree->GetMaxLevel();
|
||||
unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();
|
||||
std::vector<size_t> virtualNodesPerLevel(depth + 1);
|
||||
for (unsigned32 i = 0; i < nodeCount; i++)
|
||||
{
|
||||
const Node* node = tree->GetNode(i);
|
||||
unsigned8 level = node->GetLevel();
|
||||
if (parentsPerNode[i] > 1)
|
||||
virtualNodesPerLevel[level] += parentsPerNode[i] - 1;
|
||||
}
|
||||
return virtualNodesPerLevel;
|
||||
}
|
||||
std::vector<size_t> VirtualNodePoolBuilder::CalculateFullNodesPerLevel(const BaseTree* tree) const
|
||||
{
|
||||
// Every node appears exactly once in full
|
||||
return tree->GetNodesPerLevel();
|
||||
}
|
||||
std::vector<size_t> VirtualNodePoolBuilder::CalculatePointersToPerLevel(const BaseTree* tree) const
|
||||
{
|
||||
unsigned8 depth = tree->GetMaxLevel();
|
||||
unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();
|
||||
std::vector<size_t> pointersToPerLevel(depth + 1);
|
||||
for (unsigned32 i = 0; i < nodeCount; i++)
|
||||
{
|
||||
const Node* node = tree->GetNode(i);
|
||||
unsigned8 level = node->GetLevel();
|
||||
if (level < depth)
|
||||
pointersToPerLevel[level + 1] += node->GetChildCount();
|
||||
}
|
||||
return pointersToPerLevel;
|
||||
}

size_t VirtualNodePoolBuilder::CalculateSizeOfLevel(const BaseTree* tree, const unsigned8& level,
    const size_t& virtualNodesThisLevel, const size_t& fullNodesThisLevel, const size_t& pointersToThisLevel, const size_t& pointersFromThisLevel,
    const unsigned8& pointerSizeToThisLevel, const unsigned8& pointerSizeFromThisLevel,
    const unsigned8& additionalBytesPointersToThisLevel, const unsigned8& additionalBytesPointersFromThisLevel,
    const bool& useVirtualNodesThisLevel, const bool& useVirtualNodesPreviousLevel) const
{
    size_t requiredSize = 0;
    // Calculate size of virtual nodes placed in this level by the previous level
    if (useVirtualNodesPreviousLevel)
        requiredSize += virtualNodesThisLevel * GetVirtualNodeSize(tree, level, pointerSizeToThisLevel) + pointersToThisLevel * additionalBytesPointersToThisLevel;
    // Calculate the size of full nodes (or normal nodes) occupying this level
    if (useVirtualNodesThisLevel)
        requiredSize += fullNodesThisLevel * GetFullNodeSize(tree, level, pointerSizeFromThisLevel);
    else
        requiredSize += (1 + tree->GetAdditionalBytesPerNode(level)) * fullNodesThisLevel + pointersFromThisLevel * pointerSizeFromThisLevel + pointersFromThisLevel * additionalBytesPointersFromThisLevel;
    return requiredSize;
}
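
// Hedged summary of the branches above, written out as a formula for a level L:
//   size(L) = [L-1 uses virtual nodes] * ( virtualNodes(L) * virtualNodeSize(L) + pointersTo(L) * extraPointerBytes(L-1) )
//           + [L uses virtual nodes]   *   fullNodes(L) * fullNodeSize(L)
//           + [L uses normal nodes]    * ( fullNodes(L) * (1 + extraNodeBytes(L)) + pointersFrom(L) * (pointerSize(L) + extraPointerBytes(L)) )
// where [..] is 1 when the condition holds and 0 otherwise; the names are only shorthand for the
// parameters of this function, not identifiers that exist elsewhere in the code.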

std::vector<size_t> VirtualNodePoolBuilder::CalculateSizePerLevel(const BaseTree* tree, const std::vector<unsigned8> pointerSizesPerLevel, const std::vector<size_t>& parentsPerNode, const std::vector<bool>& useVirtualNodes) const
{
    // Calculate some statistics needed to find the size of each level in memory
    unsigned8 depth = tree->GetMaxLevel();
    std::vector<size_t> virtualNodesPerLevel = CalculateVirtualNodesPerLevel(tree, parentsPerNode);
    std::vector<size_t> fullNodesPerLevel = CalculateFullNodesPerLevel(tree);
    std::vector<size_t> pointersToLevel = CalculatePointersToPerLevel(tree);
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();

    // Calculate the actual size per level
    std::vector<size_t> sizePerLevel(depth + 1);
    for (unsigned8 level = 0; level <= depth; level++)
        sizePerLevel[level] = CalculateSizeOfLevel(tree, level, virtualNodesPerLevel[level], fullNodesPerLevel[level],
            pointersToLevel[level], level == depth ? 0 : pointersToLevel[level + 1],
            level == 0 ? 0 : pointerSizesPerLevel[level - 1], pointerSizesPerLevel[level],
            level == 0 ? 0 : additionalBytesPerPointer[level - 1], additionalBytesPerPointer[level],
            useVirtualNodes[level], level == 0 ? 0 : useVirtualNodes[level - 1]);
    return sizePerLevel;
}

std::vector<size_t> VirtualNodePoolBuilder::CalculateApproximateSizePerLevelVirtualNodes(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const
{
    // Calculate some statistics needed to find the size of each level in memory
    unsigned8 depth = tree->GetMaxLevel();
    std::vector<size_t> virtualNodesPerLevel = CalculateVirtualNodesPerLevel(tree, parentsPerNode);
    std::vector<size_t> fullNodesPerLevel = CalculateFullNodesPerLevel(tree);
    std::vector<size_t> pointersToLevel = CalculatePointersToPerLevel(tree);
    std::vector<unsigned8> pointerSizesPerLevel(depth + 1, 4); // Assume 4-byte pointers per level
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();

    // Calculate the approximate size per level
    std::vector<size_t> sizePerLevel(depth + 1);
    for (unsigned8 level = 0; level <= depth; level++)
        sizePerLevel[level] =
            fullNodesPerLevel[level] * GetFullNodeSize(tree, level, pointerSizesPerLevel) + // Full nodes size
            (level == depth ? 0 : (virtualNodesPerLevel[level + 1] * GetVirtualNodeSize(tree, level + 1, pointerSizesPerLevel))) + // Virtual nodes size
            (level == depth ? 0 : (pointersToLevel[level + 1] * additionalBytesPerPointer[level])); // Additional pointer bytes size
    return sizePerLevel;
}

std::vector<size_t> VirtualNodePoolBuilder::CalculateApproximateSizePerLevelStandardNodes(const BaseTree* tree) const
{
    unsigned8 depth = tree->GetMaxLevel();
    unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();
    std::vector<unsigned8> pointerSizesPerLevel(depth + 1, 4); // Assume 4-byte pointers per level
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
    std::vector<size_t> sizePerLevel(depth + 1);
    for (unsigned32 i = 0; i < nodeCount; i++)
    {
        const Node* node = tree->GetNode(i);
        unsigned8 level = node->GetLevel();
        sizePerLevel[level] += GetNormalNodeSize(tree, i, pointerSizesPerLevel, additionalBytesPerPointer, true);
    }
    return sizePerLevel;
}

std::vector<bool> VirtualNodePoolBuilder::DecideVirtualPointersPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const
{
    unsigned8 depth = tree->GetMaxLevel();
    std::vector<size_t> sizePerStandardLevel = CalculateApproximateSizePerLevelStandardNodes(tree);
    std::vector<size_t> sizePerVirtualNodesLevel = CalculateApproximateSizePerLevelVirtualNodes(tree, parentsPerNode);
    std::vector<bool> useVirtualNodes(depth + 1);
    for (unsigned8 level = 0; level <= depth; level++)
        useVirtualNodes[level] = sizePerVirtualNodesLevel[level] < sizePerStandardLevel[level];
    //useVirtualNodes = std::vector<bool>(depth + 1, false);
    //useVirtualNodes[0] = true;
    //useVirtualNodes[1] = true;
    //useVirtualNodes[2] = true;
    return useVirtualNodes;
}

size_t VirtualNodePoolBuilder::CalculatePoolInfoSize(const BaseTree* tree)
{
    unsigned8 depth = tree->GetMaxLevel();
    // Each tree contains at least the level offsets (4 bytes per level), the pointer sizes per level (1 byte per level)
    // and 4 bytes indicating which levels use virtual nodes.
    // Optionally, 1 byte per level is added for each table of additional information sizes (per node and per pointer).
    size_t poolInfoSize = (depth + 1) * 5 + 4;
    if (HasAdditionalBytesPerNode(tree)) poolInfoSize += depth + 1;
    if (HasAdditionalBytesPerPointer(tree)) poolInfoSize += depth + 1;
    // Additional pool info from the tree
    poolInfoSize += tree->GetAdditionalTreeInfoSize();
    return poolInfoSize;
}
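
// Worked example (hypothetical tree, for illustration only): with GetMaxLevel() == 9 there are
// 10 levels, so the mandatory part of the header is
//   10 * 4 (level offsets) + 10 * 1 (pointer sizes) + 4 (virtual-level mask) = 54 bytes,
// plus 10 bytes for each optional per-node / per-pointer size table that is present, plus whatever
// GetAdditionalTreeInfoSize() reports.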

size_t VirtualNodePoolBuilder::GetPoolSize(const BaseTree* tree)
{
    // Calculate the pool info size
    size_t minSize = CalculatePoolInfoSize(tree);

    // Calculate the main pool size
    std::vector<size_t> parentsPerNode = tree->GetParentCounts();
    std::vector<bool> useVirtualNodes = DecideVirtualPointersPerLevel(tree, parentsPerNode);
    std::vector<unsigned8> pointerSizesPerLevel = CalculatePointerSizesPerLevel(tree, parentsPerNode, useVirtualNodes);
    std::vector<size_t> sizesPerLevel = CalculateSizePerLevel(tree, pointerSizesPerLevel, parentsPerNode, useVirtualNodes);
    minSize += std::accumulate(sizesPerLevel.begin(), sizesPerLevel.end(), size_t(0));

    std::vector<size_t> virtualNodesPerLevel = CalculateVirtualNodesPerLevel(tree, parentsPerNode);
    std::vector<size_t> fullNodesPerLevel = CalculateFullNodesPerLevel(tree);
    size_t virtualNodesSum = 0;
    size_t fullNodesSum = 0;
    size_t normalNodesSum = 0;
    for (unsigned8 level = 0; level < tree->GetMaxLevel(); level++)
    {
        if (useVirtualNodes[level]) fullNodesSum += fullNodesPerLevel[level];
        else normalNodesSum += fullNodesPerLevel[level];
        if (level > 0 && useVirtualNodes[level - 1]) virtualNodesSum += virtualNodesPerLevel[level - 1];
    }
    printf("Virtual nodes: %llu, Complete nodes: %llu, Normal Nodes: %llu, Percentage virtual: %f\n", (unsigned64)virtualNodesSum, (unsigned64)fullNodesSum, (unsigned64)normalNodesSum, (double(virtualNodesSum) / double(virtualNodesSum + fullNodesSum)) * 100.0);
    return minSize;
}

//************************************
// Inserts all nodes into the final node pool and updates the pointers
//************************************
bool VirtualNodePoolBuilder::BuildPool(const BaseTree* tree, std::vector<unsigned8>& pool)
{
    if (tree == NULL) return false;
    mIsBuilding = true;

    unsigned32 nodeCount = (unsigned32)tree->GetNodeCount();
    unsigned8 depth = tree->GetMaxLevel();

    // Initialize the pool
    pool = std::vector<unsigned8>(GetPoolSize(tree));

    // Acquire some information about the pool
    std::vector<size_t> parentsPerNode = tree->GetParentCounts();
    std::vector<bool> useVirtualNodes = DecideVirtualPointersPerLevel(tree, parentsPerNode);
    std::vector<unsigned8> pointerSizesPerLevel = CalculatePointerSizesPerLevel(tree, parentsPerNode, useVirtualNodes);
    std::vector<size_t> sizePerLevel = CalculateSizePerLevel(tree, pointerSizesPerLevel, parentsPerNode, useVirtualNodes);
    std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
    std::vector<unsigned8> additionalBytesPerNode = tree->GetAdditionalBytesPerNode();
    std::vector<size_t> nodePointers(nodeCount);

    // Calculate the level offsets
    std::vector<size_t> levelOffsets(depth + 1);
    size_t curIndex = CalculatePoolInfoSize(tree);
    for (unsigned8 level = 0; level <= depth; level++)
    {
        levelOffsets[level] = curIndex;
        curIndex += sizePerLevel[level];
    }

    // Pre-calculate the node pointers for nodes in non-switch levels that do not use virtual nodes
    // (nodes in switch levels get their pointers assigned while writing)
    bool switchlevel = true;
    for (unsigned8 level = 0; level <= depth; level++)
    {
        if (!switchlevel)
        {
            curIndex = levelOffsets[level];
            for (unsigned32 i = 0; i < nodeCount; i++)
            {
                const Node* node = tree->GetNode(i);
                if (node->GetLevel() == level)
                {
                    nodePointers[i] = curIndex;
                    curIndex += GetNormalNodeSize(tree, i, pointerSizesPerLevel, additionalBytesPerPointer, true);
                }
            }
        }
        if (useVirtualNodes[level]) switchlevel = true;
        else if (switchlevel == true) switchlevel = false;
    }

    curIndex = 0;
    // Write the level offsets
    for (unsigned8 level = 0; level <= depth; level++)
        BitHelper::SplitInBytesAndMove(levelOffsets[level], pool, level * 4, 4);
    curIndex += 4 * (depth + 1);

    // Write the pointer sizes per level
    for (unsigned8 level = 0; level <= depth; level++)
        pool[curIndex++] = pointerSizesPerLevel[level];

    // Write 4 bytes indicating which levels use virtual nodes
    unsigned32 levelsUsingVirtualNodesMask = 0;
    for (unsigned8 level = 0; level <= depth; level++)
        BitHelper::SetLS(levelsUsingVirtualNodesMask, level, useVirtualNodes[level]);
    BitHelper::SplitInBytesAndMove(levelsUsingVirtualNodesMask, pool, curIndex);
    curIndex += 4;

    // Write additional bytes per node
    if (HasAdditionalBytesPerNode(tree))
    {
        for (unsigned8 level = 0; level <= depth; level++)
            pool[curIndex++] = additionalBytesPerNode[level];
    }

    // Write additional bytes per pointer
    if (HasAdditionalBytesPerPointer(tree))
    {
        for (unsigned8 level = 0; level <= depth; level++)
            pool[curIndex++] = additionalBytesPerPointer[level];
    }

    // Leave some space for the additional pool info.
    // As the actual node pointers are not yet known, we write it later
    size_t additionalTreeInfoStart = curIndex;
    curIndex += tree->GetAdditionalTreeInfoSize();

    // Find all roots (to make sure we write all nodes reachable from any root).
    std::vector<NodeToWrite> nextLevelNodes;
    std::vector<NodeToWrite> thisLevelNodes;
    for (unsigned32 i = 0; i < nodeCount; i++)
        if (tree->GetNode(i)->GetLevel() == 0) thisLevelNodes.push_back(NodeToWrite(i, 0, useVirtualNodes[0] ? FULL : NORMAL, 0, 0));
    size_t nextLevelIndex;
    BoolArray writtenNodes(nodeCount);

    // Write the node pool itself, level by level
    for (unsigned8 level = 0; level <= depth; level++)
    {
        assert(curIndex == levelOffsets[level]);
        nextLevelNodes.clear();
        nextLevelIndex = 0;
        unsigned8 additionalNodeBytes = additionalBytesPerNode[level];
        unsigned8 additionalBytesForPointersToThisLevel = level > 0 ? additionalBytesPerPointer[level - 1] : 0;
        unsigned8 additionalBytesForPointersFromThisLevel = additionalBytesPerPointer[level];
        size_t childFullNodeSize = GetFullNodeSize(tree, level + 1, pointerSizesPerLevel);
        size_t childVirtualNodeSize = GetVirtualNodeSize(tree, level + 1, pointerSizesPerLevel);
        for (NodeToWrite nodeInfo : thisLevelNodes)
        {
            const Node* node = nodeInfo.GetNode(tree);
            unsigned32 nodeId = nodeInfo.nodeId;
            assert(level == node->GetLevel());
            if (nodeInfo.type == VIRTUAL)
            { // Write a virtual node
                size_t virtualNodeSize = GetVirtualNodeSize(tree, level, pointerSizesPerLevel);
                BitHelper::SplitInBytesAndMove(nodePointers[nodeId] - levelOffsets[level], pool, curIndex, virtualNodeSize);
                curIndex += virtualNodeSize;
            }
            else if (nodeInfo.type == FULL)
            { // Write a full node
                assert(useVirtualNodes[level]);
                nodePointers[nodeId] = curIndex;

                WriteFullNode(tree, nodeId, (unsigned32)nextLevelIndex, pointerSizesPerLevel[level], writtenNodes, additionalNodeBytes, pool, curIndex);
                // Tell the next level which nodes should be written and in what order
                unsigned8 vMask = pool[curIndex + 1 + additionalNodeBytes];
                size_t nextLevelNodesOffset = nextLevelNodes.size();
                for (ChildIndex c = 0; c < 8; c++)
                    if (node->HasChild(c))
                    {
                        NodeType type;
                        if (!useVirtualNodes[level + 1]) type = BitHelper::GetLS(vMask, c) ? VIRTUAL : NORMAL;
                        else type = BitHelper::GetLS(vMask, c) ? VIRTUAL : FULL;
                        nextLevelNodes.push_back(NodeToWrite(node->GetChildIndex(c), level + 1, type, nodeId, c));
                    }
                // Calculate the size of those nodes in the next layer.
                for (auto c = nextLevelNodes.begin() + nextLevelNodesOffset; c != nextLevelNodes.end(); c++)
                {
                    switch (c->type)
                    {
                    case NORMAL: nextLevelIndex += GetNormalNodeSize(tree, c->nodeId, pointerSizesPerLevel, additionalBytesPerPointer, true); break;
                    case FULL: nextLevelIndex += childFullNodeSize; break;
                    case VIRTUAL: nextLevelIndex += childVirtualNodeSize; break;
                    }
                }
                nextLevelIndex += node->GetChildCount() * additionalBytesForPointersFromThisLevel;
                curIndex += GetFullNodeSize(tree, level, pointerSizesPerLevel);
            }
            else if (nodeInfo.type == NORMAL)
            {
                assert(nodePointers[nodeId] == 0 || nodePointers[nodeId] == curIndex);
                nodePointers[nodeId] = curIndex;
                WriteNormalNode(tree, nodeId, additionalNodeBytes, pointerSizesPerLevel[level], nodePointers, level == depth ? 0 : levelOffsets[level + 1], pool, curIndex);
                curIndex += GetNormalNodeSize(tree, nodeId, pointerSizesPerLevel, additionalBytesPerPointer, false);
                // Write additional bytes per pointer
                for (ChildIndex c = 0; c < 8; c++)
                {
                    if (node->HasChild(c))
                    {
                        WriteAdditionalPointerInfo(tree, nodeId, c, additionalBytesForPointersFromThisLevel, pool, curIndex);
                        curIndex += additionalBytesForPointersFromThisLevel;
                    }
                }
            }
            if (level > 0 && additionalBytesForPointersToThisLevel != 0 && useVirtualNodes[level - 1])
            {
                WriteAdditionalPointerInfo(tree, nodeInfo.parentId, nodeInfo.childIndexOfParent, additionalBytesForPointersToThisLevel, pool, curIndex);
                curIndex += additionalBytesForPointersToThisLevel;
            }
        }
        if (useVirtualNodes[level])
            thisLevelNodes = nextLevelNodes;
        else
        {
            thisLevelNodes.clear();
            for (unsigned32 i = 0; i < nodeCount; i++)
            {
                const Node* node = tree->GetNode(i);
                if (node->GetLevel() == level + 1)
                    thisLevelNodes.push_back(NodeToWrite(i, level + 1, NORMAL, 0, 0));
            }
        }
    }
    std::vector<unsigned8> additionalTreeInfo = tree->GetAdditionalTreeInfo(nodePointers);
    std::move(additionalTreeInfo.begin(), additionalTreeInfo.end(), pool.begin() + additionalTreeInfoStart);
    mIsBuilding = false;
    return true;
}

void VirtualNodePoolBuilder::WriteFullNode(const BaseTree* tree, const unsigned32& nodeId, const unsigned32& childPointer, const unsigned8& childPointerSize, BoolArray& writtenNodes,
    const unsigned8& additionalNodeBytes, std::vector<unsigned8>& pool, const size_t& offset) const
{
    size_t curIndex = offset;

    const Node* node = tree->GetNode(nodeId);
    pool[curIndex++] = node->GetChildmask().mask;

    // Write additional node info (if any)
    if (additionalNodeBytes != 0)
    {
        auto nodeBytes = tree->GetAdditionalNodeBytes(node);
        std::move(nodeBytes.begin(), nodeBytes.end(), pool.begin() + curIndex);
        assert(nodeBytes.size() == additionalNodeBytes);
        curIndex += additionalNodeBytes;
    }

    // Build the "virtual mask" indicating which children have already been written and will therefore appear as virtual nodes in the next level
    pool[curIndex++] = GetVMask(tree, nodeId, writtenNodes);

    // Write the pointer to the first child
    BitHelper::SplitInBytesAndMove(childPointer, pool, curIndex, childPointerSize);
    curIndex += childPointerSize;
}

void VirtualNodePoolBuilder::WriteNormalNode(const BaseTree* tree, const unsigned32& nodeId, const unsigned8& additionalNodeBytes, const unsigned8& pointerSize, const std::vector<size_t>& nodePointers, const size_t& nextLevelOffset, std::vector<unsigned8>& pool, const size_t& offset) const
{
    size_t curIndex = offset;

    const Node* node = tree->GetNode(nodeId);
    pool[curIndex++] = node->GetChildmask().mask;

    // Write additional node info (if any)
    if (additionalNodeBytes != 0)
    {
        auto nodeBytes = tree->GetAdditionalNodeBytes(node);
        std::move(nodeBytes.begin(), nodeBytes.end(), pool.begin() + curIndex);
        assert(nodeBytes.size() == additionalNodeBytes);
        curIndex += additionalNodeBytes;
    }

    // Write the child pointers
    unsigned32* children = node->GetChildren();
    for (ChildIndex c = 0; c < node->GetChildCount(); c++)
    {
        unsigned32 child = children[c];
        size_t pointer = nodePointers[child] - nextLevelOffset;
        BitHelper::SplitInBytesAndMove(pointer, pool, curIndex, pointerSize);
        curIndex += pointerSize;
    }
}
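
// Illustrative example (hypothetical values): a normal node with children in slots 2 and 5, no
// additional node bytes and 2-byte child pointers occupies 1 + 2 * 2 = 5 bytes:
//   [ childmask 0b00100100 | 2-byte pointer to child 2 | 2-byte pointer to child 5 ]
// where each pointer is stored relative to the offset of the next level (nextLevelOffset above).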

void VirtualNodePoolBuilder::WriteAdditionalPointerInfo(const BaseTree* tree, const unsigned32& nodeId, const ChildIndex& childId, const unsigned8& additionalPointerBytes, std::vector<unsigned8>& pool, const size_t& offset) const
{
    auto pointerInfo = tree->GetAdditionalPointerBytes(tree->GetNode(nodeId), childId);
    std::move(pointerInfo.begin(), pointerInfo.end(), pool.begin() + offset);
}

unsigned8 VirtualNodePoolBuilder::GetVMask(const BaseTree* tree, const unsigned32& nodeId, BoolArray& writtenNodes) const
{
    const Node* node = tree->GetNode(nodeId);
    unsigned8 vMask = 0;
    for (ChildIndex c = 0; c < 8; c++)
    {
        if (node->HasChild(c))
        {
            unsigned32 childIndex = node->GetChildIndex(c);
            // A node is virtual if it has been written before
            BitHelper::SetLS(vMask, c, writtenNodes[childIndex]);
            writtenNodes.Set(childIndex, true);
        }
    }
    return vMask;
}
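
// Example (hypothetical child layout, assuming BitHelper::SetLS indexes bits from the least
// significant end): a node with children in slots 1, 4 and 6 where the child in slot 4 was already
// written for an earlier parent produces vMask = 0b00010000. When the parent is expanded, slot 4
// then becomes a VIRTUAL node (just a pointer back to the existing copy) while slots 1 and 6 are
// written out as full (or normal) nodes. Because every child is marked as written here, the first
// parent reached in the breadth-first write pass owns the single full copy of a shared child.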

bool VirtualNodePoolBuilder::VerifyPool(std::vector<unsigned8>& pool, const unsigned8& treeDepth) const
{
    // TODO: Do some verification here.
    return true;
}
88
Research/scene/PoolBuilder/VirtualNodePoolBuilder.h
Normal file
@@ -0,0 +1,88 @@
#pragma once
#include <vector>
#include "BasePoolBuilder.h"
#include "../Octree/BaseTree.h"

class BoolArray;

// A Virtual Node Pool is a node pool in which the children of a node are stored consecutively in memory.
// Nodes that are reused (because of the DAG structure) appear as so-called "virtual nodes".
// Such a node is essentially just a pointer to the actual node and is stored consecutively in memory alongside the ordinary nodes.
// This has the advantage that only one pointer is needed per node (a pointer to the first child).
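//
// Rough memory layout sketch (illustrative only; field widths depend on the per-level pointer sizes
// chosen by CalculatePointerSizesPerLevel):
//
//   full node    : [ childmask | optional extra node bytes | vmask | pointer to first child ]
//   virtual node : [ pointer to the corresponding full node, relative to the start of its level ]
//   normal node  : [ childmask | optional extra node bytes | one pointer per child ]
//
// The children of a full node are written back to back in the next level, so the childmask plus a
// single first-child pointer locates every child; the vmask marks which of those child slots are
// virtual nodes rather than full ones.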
class VirtualNodePoolBuilder : public BasePoolBuilder<BaseTree>
{
public:
    VirtualNodePoolBuilder() { mIsBuilding = false; }
    virtual ~VirtualNodePoolBuilder() override {}

    std::string GetFullFileName(const std::string& fileName) const override;
    size_t GetPoolSize(const BaseTree* tree) override;
    bool BuildPool(const BaseTree* tree, std::vector<unsigned8>& pool) override;
    bool VerifyPool(std::vector<unsigned8>& pool, const unsigned8& depth) const override;

private:
    // Returns the size of a node without the pointers (but with the additional pointer information if needed)
    std::vector<unsigned8> CalculatePointerSizesPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode, const std::vector<bool>& useVirtualNodes) const;
    std::vector<size_t> CalculateVirtualNodesPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const;
    std::vector<size_t> CalculateFullNodesPerLevel(const BaseTree* tree) const;
    std::vector<size_t> CalculatePointersToPerLevel(const BaseTree* tree) const;
    size_t CalculateSizeOfLevel(const BaseTree* tree, const unsigned8& level,
        const size_t& virtualNodesThisLevel, const size_t& fullNodesThisLevel, const size_t& pointersToThisLevel, const size_t& pointersFromThisLevel,
        const unsigned8& pointerSizeToThisLevel, const unsigned8& pointerSizeFromThisLevel,
        const unsigned8& additionalBytesPointersToThisLevel, const unsigned8& additionalBytesPointersFromThisLevel,
        const bool& useVirtualNodesThisLevel, const bool& useVirtualNodesPreviousLevel) const;
    std::vector<size_t> CalculateSizePerLevel(const BaseTree* tree, const std::vector<unsigned8> pointerSizesPerLevel, const std::vector<size_t>& parentsPerNode, const std::vector<bool>& usesVirtualNodes) const;
    std::vector<size_t> CalculateApproximateSizePerLevelVirtualNodes(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const;
    std::vector<size_t> CalculateApproximateSizePerLevelStandardNodes(const BaseTree* tree) const;
    std::vector<bool> DecideVirtualPointersPerLevel(const BaseTree* tree, const std::vector<size_t>& parentsPerNode) const;
    size_t CalculatePoolInfoSize(const BaseTree* tree);

    size_t GetFullNodeSize(const BaseTree* tree, const unsigned8& level, const unsigned8& pointerSize) const;
    size_t GetVirtualNodeSize(const BaseTree* tree, const unsigned8& level, const unsigned8& pointerSize) const;
    size_t GetFullNodeSize(const BaseTree* tree, const unsigned8& level, const std::vector<unsigned8>& pointerSizesPerLevel) const;
    size_t GetVirtualNodeSize(const BaseTree* tree, const unsigned8& level, const std::vector<unsigned8>& pointerSizesPerLevel) const;
    size_t GetNormalNodeSize(const BaseTree* tree, const unsigned32& nodeId, const std::vector<unsigned8>& pointerSizesPerLevel, std::vector<unsigned8>& additionalPointerInfoSizesPerLevel, const bool& includingAdditionalPointerInfo) const;

    inline bool HasAdditionalBytesPerNode(const BaseTree* tree) const
    {
        std::vector<unsigned8> additionalBytesPerNode = tree->GetAdditionalBytesPerNode();
        bool hasAdditionalBytesPerNode = false;
        for (auto abpn : additionalBytesPerNode)
            if (abpn != 0) hasAdditionalBytesPerNode = true;
        return hasAdditionalBytesPerNode;
    }

    inline bool HasAdditionalBytesPerPointer(const BaseTree* tree) const
    {
        std::vector<unsigned8> additionalBytesPerPointer = tree->GetAdditionalBytesPerPointer();
        bool hasAdditionalBytesPerPointer = false;
        for (auto abpp : additionalBytesPerPointer)
            if (abpp != 0) hasAdditionalBytesPerPointer = true;
        return hasAdditionalBytesPerPointer;
    }

    void WriteFullNode(const BaseTree* tree, const unsigned32& nodeId, const unsigned32& childPointer, const unsigned8& childPointerSize, BoolArray& writtenNodes,
        const unsigned8& additionalNodeBytes, std::vector<unsigned8>& pool, const size_t& offset) const;

    void WriteNormalNode(const BaseTree* tree, const unsigned32& nodeId, const unsigned8& additionalNodeBytes, const unsigned8& pointerSize, const std::vector<size_t>& nodePointers, const size_t& nextLevelOffset, std::vector<unsigned8>& pool, const size_t& offset) const;

    void WriteAdditionalPointerInfo(const BaseTree* tree, const unsigned32& nodeId, const ChildIndex& childId, const unsigned8& additionalPointerBytes, std::vector<unsigned8>& pool, const size_t& offset) const;

    unsigned8 GetVMask(const BaseTree* tree, const unsigned32& nodeId, BoolArray& writtenNodes) const;

    bool mIsBuilding;

    enum NodeType
    {
        VIRTUAL, FULL, NORMAL
    };

    struct NodeToWrite
    {
        unsigned32 nodeId;
        unsigned8 level;
        NodeType type;
        ChildIndex childIndexOfParent;
        unsigned32 parentId;

        NodeToWrite(const unsigned32& nodeId, const unsigned8& level, const NodeType& type, const unsigned32& parentId, const ChildIndex childOfParent)
            : nodeId(nodeId), level(level), type(type), childIndexOfParent(childOfParent), parentId(parentId) {}

        const Node* GetNode(const BaseTree* tree) { return tree->GetNode(nodeId); }
    };
};