Make sure the fastfile offset is taken after alignment, that marking of following data is done on the originally written data, and that partial writes use the original data when writing dynamically sized data

too lazy to split into single commits
This commit is contained in:
Jan
2021-03-19 15:09:44 +01:00
parent 5443ed4261
commit 2f700a48a9
6 changed files with 145 additions and 40 deletions

View File

@@ -19,19 +19,26 @@ public:
virtual void IncBlockPos(size_t size) = 0;
virtual void WriteNullTerminated(const void* dst) = 0;
virtual bool ReusableShouldWrite(void** pPtr, size_t size, size_t count, std::type_index type) = 0;
virtual bool ReusableShouldWrite(void** pPtr, size_t size, std::type_index type) = 0;
virtual void ReusableAddOffset(void* ptr, size_t size, size_t count, std::type_index type) = 0;
virtual void MarkFollowing(void** pPtr) = 0;
template<typename T>
bool ReusableShouldWrite(T** pPtr)
{
return ReusableShouldWrite(reinterpret_cast<void**>(reinterpret_cast<uintptr_t>(pPtr)), sizeof(T), 1, std::type_index(typeid(T)));
return ReusableShouldWrite(reinterpret_cast<void**>(reinterpret_cast<uintptr_t>(pPtr)), sizeof(T), std::type_index(typeid(T)));
}
template<typename T>
bool ReusableShouldWrite(T** pPtr, const size_t count)
void ReusableAddOffset(T* ptr)
{
return ReusableShouldWrite(reinterpret_cast<void**>(reinterpret_cast<uintptr_t>(pPtr)), sizeof(T), count, std::type_index(typeid(T)));
ReusableAddOffset(const_cast<void*>(reinterpret_cast<const void*>(ptr)), sizeof(T), 1, std::type_index(typeid(T)));
}
template<typename T>
void ReusableAddOffset(T* ptr, const size_t count)
{
ReusableAddOffset(const_cast<void*>(reinterpret_cast<const void*>(ptr)), sizeof(T), count, std::type_index(typeid(T)));
}
template<typename T>

View File

@@ -170,7 +170,7 @@ void InMemoryZoneOutputStream::MarkFollowing(void** pPtr)
*pPtr = m_block_stack.top()->m_type == XBlock::Type::BLOCK_TYPE_TEMP ? PTR_INSERT : PTR_FOLLOWING;
}
bool InMemoryZoneOutputStream::ReusableShouldWrite(void** pPtr, const size_t entrySize, const size_t entryCount, std::type_index type)
bool InMemoryZoneOutputStream::ReusableShouldWrite(void** pPtr, const size_t entrySize, const std::type_index type)
{
assert(!m_block_stack.empty());
assert(pPtr != nullptr);
@@ -178,15 +178,9 @@ bool InMemoryZoneOutputStream::ReusableShouldWrite(void** pPtr, const size_t ent
if (*pPtr == nullptr)
return false;
const auto inTemp = m_block_stack.top()->m_type == XBlock::Type::BLOCK_TYPE_TEMP;
const auto foundEntriesForType = m_reusable_entries.find(type);
if (foundEntriesForType == m_reusable_entries.end())
{
std::vector<ReusableEntry> entries;
auto zoneOffset = inTemp ? InsertPointer() : GetCurrentZonePointer();
entries.emplace_back(*pPtr, entrySize, entryCount, zoneOffset);
m_reusable_entries.emplace(std::make_pair(type, std::move(entries)));
return true;
}
@@ -199,9 +193,25 @@ bool InMemoryZoneOutputStream::ReusableShouldWrite(void** pPtr, const size_t ent
return false;
}
}
auto zoneOffset = inTemp ? InsertPointer() : GetCurrentZonePointer();
foundEntriesForType->second.emplace_back(*pPtr, entrySize, entryCount, zoneOffset);
return true;
}
// Records the zone offset at which the reusable data at `ptr` was written,
// grouped per std::type_index, so it can be looked up later (apparently by
// ReusableShouldWrite, which appends matching ReusableEntry records).
void InMemoryZoneOutputStream::ReusableAddOffset(void* ptr, size_t size, size_t count, std::type_index type)
{
assert(!m_block_stack.empty());
// Inside a temp block the data has no stable zone position; presumably
// InsertPointer() yields a placeholder offset instead -- TODO confirm.
const auto inTemp = m_block_stack.top()->m_type == XBlock::Type::BLOCK_TYPE_TEMP;
auto zoneOffset = inTemp ? InsertPointer() : GetCurrentZonePointer();
const auto foundEntriesForType = m_reusable_entries.find(type);
if (foundEntriesForType == m_reusable_entries.end())
{
// First entry for this type: create the per-type entry list.
std::vector<ReusableEntry> entries;
entries.emplace_back(ptr, size, count, zoneOffset);
m_reusable_entries.emplace(std::make_pair(type, std::move(entries)));
}
else
{
// Type already known: append to its existing entry list.
foundEntriesForType->second.emplace_back(ptr, size, count, zoneOffset);
}
}

View File

@ -46,5 +46,6 @@ public:
void IncBlockPos(size_t size) override;
void WriteNullTerminated(const void* src) override;
void MarkFollowing(void** pPtr) override;
bool ReusableShouldWrite(void** pPtr, size_t entrySize, size_t entryCount, std::type_index type) override;
bool ReusableShouldWrite(void** pPtr, size_t entrySize, std::type_index type) override;
void ReusableAddOffset(void* ptr, size_t size, size_t count, std::type_index type) override;
};