path: root/source/WorldStorage
Diffstat (limited to 'source/WorldStorage')
-rw-r--r--   source/WorldStorage/FastNBT.cpp               547
-rw-r--r--   source/WorldStorage/FastNBT.h                 293
-rw-r--r--   source/WorldStorage/NBTChunkSerializer.cpp    533
-rw-r--r--   source/WorldStorage/NBTChunkSerializer.h      116
-rw-r--r--   source/WorldStorage/WSSAnvil.cpp             1555
-rw-r--r--   source/WorldStorage/WSSAnvil.h                184
-rw-r--r--   source/WorldStorage/WSSCompact.cpp           1009
-rw-r--r--   source/WorldStorage/WSSCompact.h              144
-rw-r--r--   source/WorldStorage/WorldStorage.cpp          409
-rw-r--r--   source/WorldStorage/WorldStorage.h            135
10 files changed, 0 insertions, 4925 deletions
diff --git a/source/WorldStorage/FastNBT.cpp b/source/WorldStorage/FastNBT.cpp
deleted file mode 100644
index e55011069..000000000
--- a/source/WorldStorage/FastNBT.cpp
+++ /dev/null
@@ -1,547 +0,0 @@
-
-// FastNBT.cpp
-
-// Implements the fast NBT parser and writer
-
-#include "Globals.h"
-#include "FastNBT.h"
-
-
-
-
-
-// The number of NBT tags that are reserved when NBT parsing starts.
-// You can override this with a command-line define.
-#ifndef NBT_RESERVE_SIZE
- #define NBT_RESERVE_SIZE 200
-#endif // NBT_RESERVE_SIZE
-
-#define RETURN_FALSE_IF_FALSE(X) do { if (!(X)) return false; } while (0)
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cParsedNBT:
-
-#define NEEDBYTES(N) \
- if (m_Length - m_Pos < N) \
- { \
- return false; \
- }
-
-
-
-
-
-cParsedNBT::cParsedNBT(const char * a_Data, int a_Length) :
- m_Data(a_Data),
- m_Length(a_Length),
- m_Pos(0)
-{
- m_IsValid = Parse();
-}
-
-
-
-
-
-bool cParsedNBT::Parse(void)
-{
- if (m_Length < 3)
- {
- // Data too short
- return false;
- }
- if (m_Data[0] != TAG_Compound)
- {
- // The top-level tag must be a Compound
- return false;
- }
-
- m_Tags.reserve(NBT_RESERVE_SIZE);
-
- m_Tags.push_back(cFastNBTTag(TAG_Compound, -1));
-
- m_Pos = 1;
-
- RETURN_FALSE_IF_FALSE(ReadString(m_Tags.back().m_NameStart, m_Tags.back().m_NameLength));
- RETURN_FALSE_IF_FALSE(ReadCompound());
-
- return true;
-}
-
-
-
-
-
-bool cParsedNBT::ReadString(int & a_StringStart, int & a_StringLen)
-{
- NEEDBYTES(2);
- a_StringStart = m_Pos + 2;
- a_StringLen = ntohs(*((short *)(m_Data + m_Pos)));
- if (a_StringLen < 0)
- {
- // Invalid string length
- return false;
- }
- m_Pos += 2 + a_StringLen;
- return true;
-}
-
-
-
-
-
-bool cParsedNBT::ReadCompound(void)
-{
- // Reads the latest tag as a compound
- int ParentIdx = m_Tags.size() - 1;
- int PrevSibling = -1;
- while (true)
- {
- NEEDBYTES(1);
- eTagType TagType = (eTagType)(m_Data[m_Pos]);
- m_Pos++;
- if (TagType == TAG_End)
- {
- break;
- }
- m_Tags.push_back(cFastNBTTag(TagType, ParentIdx, PrevSibling));
- if (PrevSibling >= 0)
- {
- m_Tags[PrevSibling].m_NextSibling = m_Tags.size() - 1;
- }
- else
- {
- m_Tags[ParentIdx].m_FirstChild = m_Tags.size() - 1;
- }
- PrevSibling = m_Tags.size() - 1;
- RETURN_FALSE_IF_FALSE(ReadString(m_Tags.back().m_NameStart, m_Tags.back().m_NameLength));
- RETURN_FALSE_IF_FALSE(ReadTag());
- } // while (true)
- m_Tags[ParentIdx].m_LastChild = PrevSibling;
- return true;
-}
-
-
-
-
-
-bool cParsedNBT::ReadList(eTagType a_ChildrenType)
-{
- // Reads the latest tag as a list of items of type a_ChildrenType
-
- // Read the count:
- NEEDBYTES(4);
- int Count = ntohl(*((int *)(m_Data + m_Pos)));
- m_Pos += 4;
- if (Count < 0)
- {
- return false;
- }
-
- // Read items:
- int ParentIdx = m_Tags.size() - 1;
- int PrevSibling = -1;
- for (int i = 0; i < Count; i++)
- {
- m_Tags.push_back(cFastNBTTag(a_ChildrenType, ParentIdx, PrevSibling));
- if (PrevSibling >= 0)
- {
- m_Tags[PrevSibling].m_NextSibling = m_Tags.size() - 1;
- }
- else
- {
- m_Tags[ParentIdx].m_FirstChild = m_Tags.size() - 1;
- }
- PrevSibling = m_Tags.size() - 1;
- RETURN_FALSE_IF_FALSE(ReadTag());
- } // for (i)
- m_Tags[ParentIdx].m_LastChild = PrevSibling;
- return true;
-}
-
-
-
-
-
-#define CASE_SIMPLE_TAG(TAGTYPE, LEN) \
- case TAG_##TAGTYPE: \
- { \
- NEEDBYTES(LEN); \
- Tag.m_DataStart = m_Pos; \
- Tag.m_DataLength = LEN; \
- m_Pos += LEN; \
- return true; \
- }
-
-bool cParsedNBT::ReadTag(void)
-{
- cFastNBTTag & Tag = m_Tags.back();
- switch (Tag.m_Type)
- {
- CASE_SIMPLE_TAG(Byte, 1)
- CASE_SIMPLE_TAG(Short, 2)
- CASE_SIMPLE_TAG(Int, 4)
- CASE_SIMPLE_TAG(Long, 8)
- CASE_SIMPLE_TAG(Float, 4)
- CASE_SIMPLE_TAG(Double, 8)
-
- case TAG_String:
- {
- return ReadString(Tag.m_DataStart, Tag.m_DataLength);
- }
-
- case TAG_ByteArray:
- {
- NEEDBYTES(4);
- int len = ntohl(*((int *)(m_Data + m_Pos)));
- m_Pos += 4;
- if (len < 0)
- {
- // Invalid length
- return false;
- }
- NEEDBYTES(len);
- Tag.m_DataLength = len;
- Tag.m_DataStart = m_Pos;
- m_Pos += len;
- return true;
- }
-
- case TAG_List:
- {
- NEEDBYTES(1);
- eTagType ItemType = (eTagType)m_Data[m_Pos];
- m_Pos++;
- RETURN_FALSE_IF_FALSE(ReadList(ItemType));
- return true;
- }
-
- case TAG_Compound:
- {
- RETURN_FALSE_IF_FALSE(ReadCompound());
- return true;
- }
-
- case TAG_IntArray:
- {
- NEEDBYTES(4);
- int len = ntohl(*((int *)(m_Data + m_Pos)));
- m_Pos += 4;
- if (len < 0)
- {
- // Invalid length
- return false;
- }
- len *= 4;
- NEEDBYTES(len);
- Tag.m_DataLength = len;
- Tag.m_DataStart = m_Pos;
- m_Pos += len;
- return true;
- }
-
- default:
- {
- ASSERT(!"Unhandled NBT tag type");
- return false;
- }
-	} // switch (Tag.m_Type)
-}
-
-#undef CASE_SIMPLE_TAG
-
-
-
-
-
-int cParsedNBT::FindChildByName(int a_Tag, const char * a_Name, size_t a_NameLength) const
-{
- if (a_Tag < 0)
- {
- return -1;
- }
- if (m_Tags[a_Tag].m_Type != TAG_Compound)
- {
- return -1;
- }
-
- if (a_NameLength == 0)
- {
- a_NameLength = strlen(a_Name);
- }
- for (int Child = m_Tags[a_Tag].m_FirstChild; Child != -1; Child = m_Tags[Child].m_NextSibling)
- {
- if (
- (m_Tags[Child].m_NameLength == a_NameLength) &&
- (memcmp(m_Data + m_Tags[Child].m_NameStart, a_Name, a_NameLength) == 0)
- )
- {
- return Child;
- }
- } // for Child - children of a_Tag
- return -1;
-}
-
-
-
-
-
-int cParsedNBT::FindTagByPath(int a_Tag, const AString & a_Path) const
-{
- if (a_Tag < 0)
- {
- return -1;
- }
- size_t Begin = 0;
- size_t Length = a_Path.length();
- int Tag = a_Tag;
- for (size_t i = 0; i < Length; i++)
- {
- if (a_Path[i] != '\\')
- {
- continue;
- }
-		Tag = FindChildByName(Tag, a_Path.c_str() + Begin, i - Begin);
- if (Tag < 0)
- {
- return -1;
- }
- Begin = i + 1;
- } // for i - a_Path[]
-
- if (Begin < Length)
- {
- Tag = FindChildByName(Tag, a_Path.c_str() + Begin, Length - Begin);
- }
- return Tag;
-}
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cFastNBTWriter:
-
-cFastNBTWriter::cFastNBTWriter(const AString & a_RootTagName) :
- m_CurrentStack(0)
-{
- m_Stack[0].m_Type = TAG_Compound;
- m_Result.reserve(100 * 1024);
- m_Result.push_back(TAG_Compound);
- WriteString(a_RootTagName.data(), a_RootTagName.size());
-}
-
-
-
-
-
-void cFastNBTWriter::BeginCompound(const AString & a_Name)
-{
- if (m_CurrentStack >= MAX_STACK)
- {
- ASSERT(!"Stack overflow");
- return;
- }
-
- TagCommon(a_Name, TAG_Compound);
-
- ++m_CurrentStack;
- m_Stack[m_CurrentStack].m_Type = TAG_Compound;
-}
-
-
-
-
-
-void cFastNBTWriter::EndCompound(void)
-{
- ASSERT(m_CurrentStack > 0);
- ASSERT(IsStackTopCompound());
-
- m_Result.push_back(TAG_End);
- --m_CurrentStack;
-}
-
-
-
-
-
-void cFastNBTWriter::BeginList(const AString & a_Name, eTagType a_ChildrenType)
-{
- if (m_CurrentStack >= MAX_STACK)
- {
- ASSERT(!"Stack overflow");
- return;
- }
-
- TagCommon(a_Name, TAG_List);
-
- m_Result.push_back((char)a_ChildrenType);
- m_Result.append(4, (char)0);
-
- ++m_CurrentStack;
- m_Stack[m_CurrentStack].m_Type = TAG_List;
- m_Stack[m_CurrentStack].m_Pos = m_Result.size() - 4;
- m_Stack[m_CurrentStack].m_Count = 0;
- m_Stack[m_CurrentStack].m_ItemType = a_ChildrenType;
-}
-
-
-
-
-
-void cFastNBTWriter::EndList(void)
-{
- ASSERT(m_CurrentStack > 0);
- ASSERT(m_Stack[m_CurrentStack].m_Type == TAG_List);
-
- // Update the list count:
- *((int *)(m_Result.c_str() + m_Stack[m_CurrentStack].m_Pos)) = htonl(m_Stack[m_CurrentStack].m_Count);
-
- --m_CurrentStack;
-}
-
-
-
-
-
-void cFastNBTWriter::AddByte(const AString & a_Name, unsigned char a_Value)
-{
- TagCommon(a_Name, TAG_Byte);
- m_Result.push_back(a_Value);
-}
-
-
-
-
-
-void cFastNBTWriter::AddShort(const AString & a_Name, Int16 a_Value)
-{
- TagCommon(a_Name, TAG_Short);
- Int16 Value = htons(a_Value);
- m_Result.append((const char *)&Value, 2);
-}
-
-
-
-
-
-void cFastNBTWriter::AddInt(const AString & a_Name, Int32 a_Value)
-{
- TagCommon(a_Name, TAG_Int);
- Int32 Value = htonl(a_Value);
- m_Result.append((const char *)&Value, 4);
-}
-
-
-
-
-
-void cFastNBTWriter::AddLong(const AString & a_Name, Int64 a_Value)
-{
- TagCommon(a_Name, TAG_Long);
- Int64 Value = HostToNetwork8(&a_Value);
- m_Result.append((const char *)&Value, 8);
-}
-
-
-
-
-
-void cFastNBTWriter::AddFloat(const AString & a_Name, float a_Value)
-{
- TagCommon(a_Name, TAG_Float);
- Int32 Value = HostToNetwork4(&a_Value);
- m_Result.append((const char *)&Value, 4);
-}
-
-
-
-
-
-void cFastNBTWriter::AddDouble(const AString & a_Name, double a_Value)
-{
- TagCommon(a_Name, TAG_Double);
- Int64 Value = HostToNetwork8(&a_Value);
- m_Result.append((const char *)&Value, 8);
-}
-
-
-
-
-
-void cFastNBTWriter::AddString(const AString & a_Name, const AString & a_Value)
-{
- TagCommon(a_Name, TAG_String);
- Int16 len = htons((short)(a_Value.size()));
- m_Result.append((const char *)&len, 2);
- m_Result.append(a_Value.c_str(), a_Value.size());
-}
-
-
-
-
-
-void cFastNBTWriter::AddByteArray(const AString & a_Name, const char * a_Value, size_t a_NumElements)
-{
- TagCommon(a_Name, TAG_ByteArray);
- Int32 len = htonl(a_NumElements);
- m_Result.append((const char *)&len, 4);
- m_Result.append(a_Value, a_NumElements);
-}
-
-
-
-
-
-void cFastNBTWriter::AddIntArray(const AString & a_Name, const int * a_Value, size_t a_NumElements)
-{
- TagCommon(a_Name, TAG_IntArray);
- Int32 len = htonl(a_NumElements);
- m_Result.append((const char *)&len, 4);
-#if defined(ANDROID_NDK)
- // Android has alignment issues - cannot byteswap (htonl) an int that is not 32-bit-aligned, which happens in the regular version
- for (size_t i = 0; i < a_NumElements; i++)
- {
- int Element = htonl(a_Value[i]);
- m_Result.append((const char *)&Element, 4);
- }
-#else
-	size_t Offset = m_Result.size();
-	m_Result.append(a_NumElements * 4, (char)0);
-	// Compute the pointer only after the append, so that a potential reallocation cannot invalidate it:
-	int * Elements = (int *)(m_Result.data() + Offset);
- for (size_t i = 0; i < a_NumElements; i++)
- {
- Elements[i] = htonl(a_Value[i]);
- }
-#endif
-}
-
-
-
-
-
-void cFastNBTWriter::Finish(void)
-{
- ASSERT(m_CurrentStack == 0);
- m_Result.push_back(TAG_End);
-}
-
-
-
-
-
-void cFastNBTWriter::WriteString(const char * a_Data, short a_Length)
-{
- Int16 Len = htons(a_Length);
- m_Result.append((const char *)&Len, 2);
- m_Result.append(a_Data, a_Length);
-}
-
-
-
-
diff --git a/source/WorldStorage/FastNBT.h b/source/WorldStorage/FastNBT.h
deleted file mode 100644
index 7323c29cb..000000000
--- a/source/WorldStorage/FastNBT.h
+++ /dev/null
@@ -1,293 +0,0 @@
-
-// FastNBT.h
-
-// Interfaces to the fast NBT parser and writer
-
-/*
-The fast parser parses the data into a vector of cFastNBTTag structures. These structures describe the NBT tree,
-but themselves are allocated in a vector, thus minimizing reallocation.
-The structures have a minimal constructor, setting all member "pointers" to "invalid".
-
-The fast writer doesn't need an NBT tree structure built beforehand; it is commanded to open, append and close tags
-(just like XML); it keeps the internal tag stack and reports errors in usage.
-It directly outputs a string containing the serialized NBT data.
-*/
-
-
-
-
-
-#pragma once
-
-#include "../Endianness.h"
-
-
-
-
-
-enum eTagType
-{
- TAG_Min = 0, // The minimum value for a tag type
- TAG_End = 0,
- TAG_Byte = 1,
- TAG_Short = 2,
- TAG_Int = 3,
- TAG_Long = 4,
- TAG_Float = 5,
- TAG_Double = 6,
- TAG_ByteArray = 7,
- TAG_String = 8,
- TAG_List = 9,
- TAG_Compound = 10,
- TAG_IntArray = 11,
- TAG_Max = 11, // The maximum value for a tag type
-} ;
-
-
-
-
-
-/** This structure is used for all NBT tags.
-It contains indices to the parent array of tags, building the NBT tree this way.
-Also contains indices into the data stream being parsed, used for values;
-NO dynamically allocated memory is used!
-The structure (together with the tree it describes) supports being moved in memory (std::vector reallocation).
-*/
-struct cFastNBTTag
-{
-public:
-
- eTagType m_Type;
-
- // The following members are indices into the data stream. m_DataLength == 0 if no data available
- // They must not be pointers, because the datastream may be copied into another AString object in the meantime.
- int m_NameStart;
- int m_NameLength;
- int m_DataStart;
- int m_DataLength;
-
- // The following members are indices into the array returned; -1 if not valid
- // They must not be pointers, because pointers would not survive std::vector reallocation
- int m_Parent;
- int m_PrevSibling;
- int m_NextSibling;
- int m_FirstChild;
- int m_LastChild;
-
- cFastNBTTag(eTagType a_Type, int a_Parent) :
- m_Type(a_Type),
- m_NameLength(0),
- m_DataLength(0),
- m_Parent(a_Parent),
- m_PrevSibling(-1),
- m_NextSibling(-1),
- m_FirstChild(-1),
- m_LastChild(-1)
- {
- }
-
- cFastNBTTag(eTagType a_Type, int a_Parent, int a_PrevSibling) :
- m_Type(a_Type),
- m_NameLength(0),
- m_DataLength(0),
- m_Parent(a_Parent),
- m_PrevSibling(a_PrevSibling),
- m_NextSibling(-1),
- m_FirstChild(-1),
- m_LastChild(-1)
- {
- }
-} ;
-
-
-
-
-
-/** Parses and contains the parsed data
-Also implements data accessor functions for tree traversal and value getters
-The data pointer passed in the constructor is assumed to be valid throughout the object's life. Care must be taken not to initialize from a temporary.
-*/
-class cParsedNBT
-{
-public:
- cParsedNBT(const char * a_Data, int a_Length);
-
- bool IsValid(void) const {return m_IsValid; }
-
- int GetRoot(void) const {return 0; }
- int GetFirstChild (int a_Tag) const { return m_Tags[a_Tag].m_FirstChild; }
- int GetLastChild (int a_Tag) const { return m_Tags[a_Tag].m_LastChild; }
- int GetNextSibling(int a_Tag) const { return m_Tags[a_Tag].m_NextSibling; }
- int GetPrevSibling(int a_Tag) const { return m_Tags[a_Tag].m_PrevSibling; }
- int GetDataLength (int a_Tag) const { return m_Tags[a_Tag].m_DataLength; }
-
- const char * GetData(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type != TAG_List);
- ASSERT(m_Tags[a_Tag].m_Type != TAG_Compound);
- return m_Data + m_Tags[a_Tag].m_DataStart;
- }
-
- int FindChildByName(int a_Tag, const AString & a_Name) const
- {
- return FindChildByName(a_Tag, a_Name.c_str(), a_Name.length());
- }
-
- int FindChildByName(int a_Tag, const char * a_Name, size_t a_NameLength = 0) const;
- int FindTagByPath (int a_Tag, const AString & a_Path) const;
-
- eTagType GetType(int a_Tag) const { return m_Tags[a_Tag].m_Type; }
-
- /// Returns the children type for a list tag; undefined on other tags. If list empty, returns TAG_End
- eTagType GetChildrenType(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_List);
- return (m_Tags[a_Tag].m_FirstChild < 0) ? TAG_End : m_Tags[m_Tags[a_Tag].m_FirstChild].m_Type;
- }
-
- inline unsigned char GetByte(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Byte);
- return (unsigned char)(m_Data[m_Tags[a_Tag].m_DataStart]);
- }
-
- inline Int16 GetShort(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Short);
- return ntohs(*((Int16 *)(m_Data + m_Tags[a_Tag].m_DataStart)));
- }
-
- inline Int32 GetInt(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Int);
- return ntohl(*((Int32 *)(m_Data + m_Tags[a_Tag].m_DataStart)));
- }
-
- inline Int64 GetLong(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Long);
- return NetworkToHostLong8(m_Data + m_Tags[a_Tag].m_DataStart);
- }
-
- inline float GetFloat(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Float);
- Int32 tmp = ntohl(*((Int32 *)(m_Data + m_Tags[a_Tag].m_DataStart)));
- return *((float *)&tmp);
- }
-
- inline double GetDouble(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_Double);
- return NetworkToHostDouble8(m_Data + m_Tags[a_Tag].m_DataStart);
- }
-
- inline AString GetString(int a_Tag) const
- {
- ASSERT(m_Tags[a_Tag].m_Type == TAG_String);
- AString res;
- res.assign(m_Data + m_Tags[a_Tag].m_DataStart, m_Tags[a_Tag].m_DataLength);
- return res;
- }
-
- inline AString GetName(int a_Tag) const
- {
- AString res;
- res.assign(m_Data + m_Tags[a_Tag].m_NameStart, m_Tags[a_Tag].m_NameLength);
- return res;
- }
-
-protected:
- const char * m_Data;
- int m_Length;
- std::vector<cFastNBTTag> m_Tags;
- bool m_IsValid; // True if parsing succeeded
-
- // Used while parsing:
- int m_Pos;
-
- bool Parse(void);
- bool ReadString(int & a_StringStart, int & a_StringLen); // Reads a simple string (2 bytes length + data), sets the string descriptors
- bool ReadCompound(void); // Reads the latest tag as a compound
- bool ReadList(eTagType a_ChildrenType); // Reads the latest tag as a list of items of type a_ChildrenType
- bool ReadTag(void); // Reads the latest tag, depending on its m_Type setting
-} ;
-
-
-
-
-
-class cFastNBTWriter
-{
-public:
- cFastNBTWriter(const AString & a_RootTagName = "");
-
- void BeginCompound(const AString & a_Name);
- void EndCompound(void);
-
- void BeginList(const AString & a_Name, eTagType a_ChildrenType);
- void EndList(void);
-
- void AddByte (const AString & a_Name, unsigned char a_Value);
- void AddShort (const AString & a_Name, Int16 a_Value);
- void AddInt (const AString & a_Name, Int32 a_Value);
- void AddLong (const AString & a_Name, Int64 a_Value);
- void AddFloat (const AString & a_Name, float a_Value);
- void AddDouble (const AString & a_Name, double a_Value);
- void AddString (const AString & a_Name, const AString & a_Value);
- void AddByteArray(const AString & a_Name, const char * a_Value, size_t a_NumElements);
- void AddIntArray (const AString & a_Name, const int * a_Value, size_t a_NumElements);
-
- void AddByteArray(const AString & a_Name, const AString & a_Value)
- {
- AddByteArray(a_Name, a_Value.data(), a_Value.size());
- }
-
- const AString & GetResult(void) const {return m_Result; }
-
- void Finish(void);
-
-protected:
-
- struct sParent
- {
- int m_Type; // TAG_Compound or TAG_List
- int m_Pos; // for TAG_List, the position of the list count
- int m_Count; // for TAG_List, the element count
- eTagType m_ItemType; // for TAG_List, the element type
- } ;
-
-	static const int MAX_STACK = 50; // Highly doubtful that an NBT would be constructed this many levels deep
-
- // These two fields emulate a stack. A raw array is used due to speed issues - no reallocations are allowed.
- sParent m_Stack[MAX_STACK];
- int m_CurrentStack;
-
- AString m_Result;
-
- bool IsStackTopCompound(void) const { return (m_Stack[m_CurrentStack].m_Type == TAG_Compound); }
-
- void WriteString(const char * a_Data, short a_Length);
-
- inline void TagCommon(const AString & a_Name, eTagType a_Type)
- {
- // If we're directly inside a list, check that the list is of the correct type:
- ASSERT((m_Stack[m_CurrentStack].m_Type != TAG_List) || (m_Stack[m_CurrentStack].m_ItemType == a_Type));
-
- if (IsStackTopCompound())
- {
- // Compound: add the type and name:
- m_Result.push_back((char)a_Type);
- WriteString(a_Name.c_str(), (short)a_Name.length());
- }
- else
- {
- // List: add to the counter
- m_Stack[m_CurrentStack].m_Count++;
- }
- }
-} ;
-
-
-
-
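
Illustrative only: a minimal sketch of how the cParsedNBT and cFastNBTWriter interfaces declared above are meant to be used. It is not part of the removed files; the include paths assume the sketch lives alongside the WorldStorage sources, and the tag names ("Level", "xPos", "zPos") are assumed purely for the example.

// UsageSketch.cpp - editor's illustration, based solely on the declarations in FastNBT.h above
#include "Globals.h"
#include "FastNBT.h"

void ExampleRoundTrip(const AString & a_RawNBT)
{
	// Reading: parse once, then walk the tag tree by integer indices:
	cParsedNBT NBT(a_RawNBT.data(), (int)a_RawNBT.size());
	if (!NBT.IsValid())
	{
		return;  // Malformed NBT data
	}
	int Level = NBT.FindChildByName(NBT.GetRoot(), "Level");
	int xPos = (Level >= 0) ? NBT.FindChildByName(Level, "xPos") : -1;
	if ((xPos >= 0) && (NBT.GetType(xPos) == TAG_Int))
	{
		int ChunkX = NBT.GetInt(xPos);  // Typed getters assert on the tag type
		(void)ChunkX;
	}

	// Writing: open / append / close tags, then fetch the serialized string:
	cFastNBTWriter Writer;
	Writer.BeginCompound("Level");
	Writer.AddInt("xPos", 0);
	Writer.AddInt("zPos", 0);
	Writer.EndCompound();
	Writer.Finish();
	const AString & Serialized = Writer.GetResult();
	(void)Serialized;
}
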
diff --git a/source/WorldStorage/NBTChunkSerializer.cpp b/source/WorldStorage/NBTChunkSerializer.cpp
deleted file mode 100644
index c9013b1b3..000000000
--- a/source/WorldStorage/NBTChunkSerializer.cpp
+++ /dev/null
@@ -1,533 +0,0 @@
-
-// NBTChunkSerializer.cpp
-
-
-#include "Globals.h"
-#include "NBTChunkSerializer.h"
-#include "../BlockID.h"
-#include "../BlockEntities/ChestEntity.h"
-#include "../BlockEntities/DispenserEntity.h"
-#include "../BlockEntities/DropperEntity.h"
-#include "../BlockEntities/FurnaceEntity.h"
-#include "../BlockEntities/HopperEntity.h"
-#include "../BlockEntities/JukeboxEntity.h"
-#include "../BlockEntities/NoteEntity.h"
-#include "../BlockEntities/SignEntity.h"
-#include "../ItemGrid.h"
-#include "../StringCompression.h"
-#include "../Entities/Entity.h"
-#include "FastNBT.h"
-#include "../Entities/FallingBlock.h"
-#include "../Entities/Boat.h"
-#include "../Entities/Minecart.h"
-#include "../Mobs/Monster.h"
-#include "../Entities/Pickup.h"
-#include "../Entities/ProjectileEntity.h"
-
-
-
-
-
-cNBTChunkSerializer::cNBTChunkSerializer(cFastNBTWriter & a_Writer) :
- m_BiomesAreValid(false),
- m_Writer(a_Writer),
- m_IsTagOpen(false),
- m_HasHadEntity(false),
- m_HasHadBlockEntity(false),
- m_IsLightValid(false)
-{
-}
-
-
-
-
-
-void cNBTChunkSerializer::Finish(void)
-{
- if (m_IsTagOpen)
- {
- m_Writer.EndList();
- }
-
- // If light not valid, reset it to all zeroes:
- if (!m_IsLightValid)
- {
- memset(m_BlockLight, 0, sizeof(m_BlockLight));
- memset(m_BlockSkyLight, 0, sizeof(m_BlockSkyLight));
- }
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddItem(const cItem & a_Item, int a_Slot, const AString & a_CompoundName)
-{
- m_Writer.BeginCompound(a_CompoundName);
- m_Writer.AddShort("id", (short)(a_Item.m_ItemType));
- m_Writer.AddShort("Damage", a_Item.m_ItemDamage);
- m_Writer.AddByte ("Count", a_Item.m_ItemCount);
- if (a_Slot >= 0)
- {
- m_Writer.AddByte ("Slot", (unsigned char)a_Slot);
- }
-
- // Write the enchantments:
- if (!a_Item.m_Enchantments.IsEmpty())
- {
- const char * TagName = (a_Item.m_ItemType == E_ITEM_BOOK) ? "StoredEnchantments" : "ench";
- m_Writer.BeginCompound("tag");
- a_Item.m_Enchantments.WriteToNBTCompound(m_Writer, TagName);
- m_Writer.EndCompound();
- }
-
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddItemGrid(const cItemGrid & a_Grid, int a_BeginSlotNum)
-{
- int NumSlots = a_Grid.GetNumSlots();
- for (int i = 0; i < NumSlots; i++)
- {
- const cItem & Item = a_Grid.GetSlot(i);
- if (Item.IsEmpty())
- {
- continue;
- }
- AddItem(Item, i + a_BeginSlotNum);
- } // for i - chest slots[]
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddBasicTileEntity(cBlockEntity * a_Entity, const char * a_EntityTypeID)
-{
- m_Writer.AddInt ("x", a_Entity->GetPosX());
- m_Writer.AddInt ("y", a_Entity->GetPosY());
- m_Writer.AddInt ("z", a_Entity->GetPosZ());
- m_Writer.AddString("id", a_EntityTypeID);
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddChestEntity(cChestEntity * a_Entity)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Entity, "Chest");
- m_Writer.BeginList("Items", TAG_Compound);
- AddItemGrid(a_Entity->GetContents());
- m_Writer.EndList();
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddDispenserEntity(cDispenserEntity * a_Entity)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Entity, "Trap");
- m_Writer.BeginList("Items", TAG_Compound);
- AddItemGrid(a_Entity->GetContents());
- m_Writer.EndList();
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddDropperEntity(cDropperEntity * a_Entity)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Entity, "Dropper");
- m_Writer.BeginList("Items", TAG_Compound);
- AddItemGrid(a_Entity->GetContents());
- m_Writer.EndList();
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddFurnaceEntity(cFurnaceEntity * a_Furnace)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Furnace, "Furnace");
- m_Writer.BeginList("Items", TAG_Compound);
- AddItemGrid(a_Furnace->GetContents());
- m_Writer.EndList();
- m_Writer.AddShort("BurnTime", a_Furnace->GetFuelBurnTimeLeft());
- m_Writer.AddShort("CookTime", a_Furnace->GetTimeCooked());
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddHopperEntity(cHopperEntity * a_Entity)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Entity, "Hopper");
- m_Writer.BeginList("Items", TAG_Compound);
- AddItemGrid(a_Entity->GetContents());
- m_Writer.EndList();
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddJukeboxEntity(cJukeboxEntity * a_Jukebox)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Jukebox, "RecordPlayer");
- m_Writer.AddInt("Record", a_Jukebox->GetRecord());
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddNoteEntity(cNoteEntity * a_Note)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Note, "Music");
- m_Writer.AddByte("note", a_Note->GetPitch());
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddSignEntity(cSignEntity * a_Sign)
-{
- m_Writer.BeginCompound("");
- AddBasicTileEntity(a_Sign, "Sign");
- m_Writer.AddString("Text1", a_Sign->GetLine(0));
- m_Writer.AddString("Text2", a_Sign->GetLine(1));
- m_Writer.AddString("Text3", a_Sign->GetLine(2));
- m_Writer.AddString("Text4", a_Sign->GetLine(3));
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddBasicEntity(cEntity * a_Entity, const AString & a_ClassName)
-{
- m_Writer.AddString("id", a_ClassName);
- m_Writer.BeginList("Pos", TAG_Double);
- m_Writer.AddDouble("", a_Entity->GetPosX());
- m_Writer.AddDouble("", a_Entity->GetPosY());
- m_Writer.AddDouble("", a_Entity->GetPosZ());
- m_Writer.EndList();
- m_Writer.BeginList("Motion", TAG_Double);
- m_Writer.AddDouble("", a_Entity->GetSpeedX());
- m_Writer.AddDouble("", a_Entity->GetSpeedY());
- m_Writer.AddDouble("", a_Entity->GetSpeedZ());
- m_Writer.EndList();
- m_Writer.BeginList("Rotation", TAG_Double);
- m_Writer.AddDouble("", a_Entity->GetRotation());
- m_Writer.AddDouble("", a_Entity->GetPitch());
- m_Writer.EndList();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddBoatEntity(cBoat * a_Boat)
-{
- m_Writer.BeginCompound("");
- AddBasicEntity(a_Boat, "Boat");
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddFallingBlockEntity(cFallingBlock * a_FallingBlock)
-{
- m_Writer.BeginCompound("");
- AddBasicEntity(a_FallingBlock, "FallingSand");
- m_Writer.AddInt("TileID", a_FallingBlock->GetBlockType());
- m_Writer.AddByte("Data", a_FallingBlock->GetBlockMeta());
- m_Writer.AddByte("Time", 1); // Unused in MCServer, Vanilla said to need nonzero
- m_Writer.AddByte("DropItem", 1);
- m_Writer.AddByte("HurtEntities", a_FallingBlock->GetBlockType() == E_BLOCK_ANVIL);
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddMinecartEntity(cMinecart * a_Minecart)
-{
- const char * EntityClass = NULL;
- switch (a_Minecart->GetPayload())
- {
- case cMinecart::mpNone: EntityClass = "MinecartRideable"; break;
- case cMinecart::mpChest: EntityClass = "MinecartChest"; break;
- case cMinecart::mpFurnace: EntityClass = "MinecartFurnace"; break;
- case cMinecart::mpTNT: EntityClass = "MinecartTNT"; break;
- case cMinecart::mpHopper: EntityClass = "MinecartHopper"; break;
- default:
- {
- ASSERT(!"Unhandled minecart payload type");
- return;
- }
- } // switch (payload)
-
- m_Writer.BeginCompound("");
- AddBasicEntity(a_Minecart, EntityClass);
- switch (a_Minecart->GetPayload())
- {
- case cMinecart::mpChest:
- {
- // Add chest contents into the Items tag:
- AddMinecartChestContents((cMinecartWithChest *)a_Minecart);
- break;
- }
-
- case cMinecart::mpFurnace:
- {
- // TODO: Add "Push" and "Fuel" tags
- break;
- }
- } // switch (Payload)
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddMonsterEntity(cMonster * a_Monster)
-{
- // TODO
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddPickupEntity(cPickup * a_Pickup)
-{
- m_Writer.BeginCompound("");
- AddBasicEntity(a_Pickup, "Item");
- AddItem(a_Pickup->GetItem(), -1, "Item");
- m_Writer.AddShort("Health", a_Pickup->GetHealth());
- m_Writer.AddShort("Age", a_Pickup->GetAge());
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddProjectileEntity(cProjectileEntity * a_Projectile)
-{
- m_Writer.BeginCompound("");
- AddBasicEntity(a_Projectile, a_Projectile->GetMCAClassName());
- Vector3d Pos = a_Projectile->GetPosition();
- m_Writer.AddShort("xTile", (Int16)floor(Pos.x));
- m_Writer.AddShort("yTile", (Int16)floor(Pos.y));
- m_Writer.AddShort("zTile", (Int16)floor(Pos.z));
- m_Writer.AddShort("inTile", 0); // TODO: Query the block type
- m_Writer.AddShort("shake", 0); // TODO: Any shake?
- m_Writer.AddByte ("inGround", a_Projectile->IsInGround() ? 1 : 0);
-
- switch (a_Projectile->GetProjectileKind())
- {
- case cProjectileEntity::pkArrow:
- {
- m_Writer.AddByte("inData", 0); // TODO: Query the block meta (is it needed?)
- m_Writer.AddByte("pickup", ((cArrowEntity *)a_Projectile)->GetPickupState());
- m_Writer.AddDouble("damage", ((cArrowEntity *)a_Projectile)->GetDamageCoeff());
- break;
- }
- case cProjectileEntity::pkGhastFireball:
- {
- m_Writer.AddInt("ExplosionPower", 1);
- // fall-through:
- }
- case cProjectileEntity::pkFireCharge:
- case cProjectileEntity::pkWitherSkull:
- case cProjectileEntity::pkEnderPearl:
- {
- m_Writer.BeginList("Motion", TAG_Double);
- m_Writer.AddDouble("", a_Projectile->GetSpeedX());
- m_Writer.AddDouble("", a_Projectile->GetSpeedY());
- m_Writer.AddDouble("", a_Projectile->GetSpeedZ());
- m_Writer.EndList();
- break;
- }
- default:
- {
- ASSERT(!"Unsaved projectile entity!");
- }
- } // switch (ProjectileKind)
- cEntity * Creator = a_Projectile->GetCreator();
- if (Creator != NULL)
- {
- if (Creator->GetEntityType() == cEntity::etPlayer)
- {
- m_Writer.AddString("ownerName", ((cPlayer *)Creator)->GetName());
- }
- }
- m_Writer.EndCompound();
-}
-
-
-
-
-
-void cNBTChunkSerializer::AddMinecartChestContents(cMinecartWithChest * a_Minecart)
-{
- m_Writer.BeginList("Items", TAG_Compound);
- for (int i = 0; i < cMinecartWithChest::NumSlots; i++)
- {
- const cItem & Item = a_Minecart->GetSlot(i);
- if (Item.IsEmpty())
- {
- continue;
- }
- AddItem(Item, i);
- }
- m_Writer.EndList();
-}
-
-
-
-
-
-bool cNBTChunkSerializer::LightIsValid(bool a_IsLightValid)
-{
- m_IsLightValid = a_IsLightValid;
- return a_IsLightValid; // We want lighting only if it's valid, otherwise don't bother
-}
-
-
-
-
-
-void cNBTChunkSerializer::BiomeData(const cChunkDef::BiomeMap * a_BiomeMap)
-{
- memcpy(m_Biomes, a_BiomeMap, sizeof(m_Biomes));
- for (int i = 0; i < ARRAYCOUNT(m_Biomes); i++)
- {
- if ((*a_BiomeMap)[i] < 255)
- {
- // Normal MC biome, copy as-is:
- m_VanillaBiomes[i] = (unsigned char)((*a_BiomeMap)[i]);
- }
- else
- {
- // TODO: MCS-specific biome, need to map to some basic MC biome:
- ASSERT(!"Unimplemented MCS-specific biome");
- return;
- }
- } // for i - m_BiomeMap[]
- m_BiomesAreValid = true;
-}
-
-
-
-
-
-void cNBTChunkSerializer::Entity(cEntity * a_Entity)
-{
- // Add entity into NBT:
- if (m_IsTagOpen)
- {
- if (!m_HasHadEntity)
- {
- m_Writer.EndList();
- m_Writer.BeginList("Entities", TAG_Compound);
- }
- }
- else
- {
- m_Writer.BeginList("Entities", TAG_Compound);
- }
- m_IsTagOpen = true;
- m_HasHadEntity = true;
-
- switch (a_Entity->GetEntityType())
- {
- case cEntity::etBoat: AddBoatEntity ((cBoat *) a_Entity); break;
- case cEntity::etFallingBlock: AddFallingBlockEntity((cFallingBlock *) a_Entity); break;
- case cEntity::etMinecart: AddMinecartEntity ((cMinecart *) a_Entity); break;
- case cEntity::etMonster: AddMonsterEntity ((cMonster *) a_Entity); break;
- case cEntity::etPickup: AddPickupEntity ((cPickup *) a_Entity); break;
- case cEntity::etProjectile: AddProjectileEntity ((cProjectileEntity *)a_Entity); break;
- case cEntity::etPlayer: return; // Players aren't saved into the world
- default:
- {
- ASSERT(!"Unhandled entity type is being saved");
- break;
- }
- }
-}
-
-
-
-
-
-void cNBTChunkSerializer::BlockEntity(cBlockEntity * a_Entity)
-{
- if (m_IsTagOpen)
- {
- if (!m_HasHadBlockEntity)
- {
- m_Writer.EndList();
- m_Writer.BeginList("TileEntities", TAG_Compound);
- }
- }
- else
- {
- m_Writer.BeginList("TileEntities", TAG_Compound);
- }
- m_IsTagOpen = true;
-
- // Add tile-entity into NBT:
- switch (a_Entity->GetBlockType())
- {
- case E_BLOCK_CHEST: AddChestEntity ((cChestEntity *) a_Entity); break;
- case E_BLOCK_DISPENSER: AddDispenserEntity ((cDispenserEntity *) a_Entity); break;
- case E_BLOCK_DROPPER: AddDropperEntity ((cDropperEntity *) a_Entity); break;
- case E_BLOCK_FURNACE: AddFurnaceEntity ((cFurnaceEntity *) a_Entity); break;
- case E_BLOCK_HOPPER: AddHopperEntity ((cHopperEntity *) a_Entity); break;
- case E_BLOCK_SIGN_POST:
- case E_BLOCK_WALLSIGN: AddSignEntity ((cSignEntity *) a_Entity); break;
- case E_BLOCK_NOTE_BLOCK: AddNoteEntity ((cNoteEntity *) a_Entity); break;
- case E_BLOCK_JUKEBOX: AddJukeboxEntity ((cJukeboxEntity *) a_Entity); break;
- default:
- {
- ASSERT(!"Unhandled block entity saved into Anvil");
- }
- }
- m_HasHadBlockEntity = true;
-}
-
-
-
-
diff --git a/source/WorldStorage/NBTChunkSerializer.h b/source/WorldStorage/NBTChunkSerializer.h
deleted file mode 100644
index 9d4ac208c..000000000
--- a/source/WorldStorage/NBTChunkSerializer.h
+++ /dev/null
@@ -1,116 +0,0 @@
-
-// NBTChunkSerializer.h
-
-// Declares the cNBTChunkSerializer class that is used for saving individual chunks into the NBT format used by Anvil
-
-
-
-
-
-#pragma once
-
-#include "../ChunkDef.h"
-
-
-
-
-
-// fwd:
-class cFastNBTWriter;
-class cEntity;
-class cBlockEntity;
-class cBoat;
-class cChestEntity;
-class cDispenserEntity;
-class cDropperEntity;
-class cFurnaceEntity;
-class cHopperEntity;
-class cJukeboxEntity;
-class cNoteEntity;
-class cSignEntity;
-class cFallingBlock;
-class cMinecart;
-class cMinecartWithChest;
-class cMinecartWithFurnace;
-class cMinecartWithTNT;
-class cMinecartWithHopper;
-class cMonster;
-class cPickup;
-class cItemGrid;
-class cProjectileEntity;
-
-
-
-
-
-class cNBTChunkSerializer :
- public cChunkDataSeparateCollector
-{
-public:
- cChunkDef::BiomeMap m_Biomes;
- unsigned char m_VanillaBiomes[cChunkDef::Width * cChunkDef::Width];
- bool m_BiomesAreValid;
-
-
- cNBTChunkSerializer(cFastNBTWriter & a_Writer);
-
- /// Close NBT tags that we've opened
- void Finish(void);
-
- bool IsLightValid(void) const {return m_IsLightValid; }
-
-protected:
-
- /* From cChunkDataSeparateCollector we inherit:
- - m_BlockTypes[]
- - m_BlockMetas[]
- - m_BlockLight[]
- - m_BlockSkyLight[]
- */
-
- cFastNBTWriter & m_Writer;
-
- bool m_IsTagOpen; // True if a tag has been opened in the callbacks and not yet closed.
- bool m_HasHadEntity; // True if any Entity has already been received and processed
- bool m_HasHadBlockEntity; // True if any BlockEntity has already been received and processed
- bool m_IsLightValid; // True if the chunk lighting is valid
-
-
-	/// Writes an item into the writer; if a_Slot >= 0, adds the Slot tag. The compound is named as requested.
- void AddItem(const cItem & a_Item, int a_Slot, const AString & a_CompoundName = "");
-
-	/// Writes an item grid into the writer; begins the stored slot numbers at a_BeginSlotNum. Note that it neither begins nor ends the list tag.
- void AddItemGrid(const cItemGrid & a_Grid, int a_BeginSlotNum = 0);
-
- // Block entities:
- void AddBasicTileEntity(cBlockEntity * a_Entity, const char * a_EntityTypeID);
- void AddChestEntity (cChestEntity * a_Entity);
- void AddDispenserEntity(cDispenserEntity * a_Entity);
- void AddDropperEntity (cDropperEntity * a_Entity);
- void AddFurnaceEntity (cFurnaceEntity * a_Furnace);
- void AddHopperEntity (cHopperEntity * a_Entity);
- void AddJukeboxEntity (cJukeboxEntity * a_Jukebox);
- void AddNoteEntity (cNoteEntity * a_Note);
- void AddSignEntity (cSignEntity * a_Sign);
-
- // Entities:
- void AddBasicEntity (cEntity * a_Entity, const AString & a_ClassName);
- void AddBoatEntity (cBoat * a_Boat);
- void AddFallingBlockEntity(cFallingBlock * a_FallingBlock);
- void AddMinecartEntity (cMinecart * a_Minecart);
- void AddMonsterEntity (cMonster * a_Monster);
- void AddPickupEntity (cPickup * a_Pickup);
- void AddProjectileEntity (cProjectileEntity * a_Projectile);
-
- void AddMinecartChestContents(cMinecartWithChest * a_Minecart);
-
- // cChunkDataSeparateCollector overrides:
- virtual bool LightIsValid(bool a_IsLightValid) override;
- virtual void BiomeData(const cChunkDef::BiomeMap * a_BiomeMap) override;
- virtual void Entity(cEntity * a_Entity) override;
- virtual void BlockEntity(cBlockEntity * a_Entity) override;
-} ; // class cNBTChunkSerializer
-
-
-
-
diff --git a/source/WorldStorage/WSSAnvil.cpp b/source/WorldStorage/WSSAnvil.cpp
deleted file mode 100644
index b2e104a78..000000000
--- a/source/WorldStorage/WSSAnvil.cpp
+++ /dev/null
@@ -1,1555 +0,0 @@
-
-// WSSAnvil.cpp
-
-// Implements the cWSSAnvil class representing the Anvil world storage scheme
-
-#include "Globals.h"
-#include "WSSAnvil.h"
-#include "NBTChunkSerializer.h"
-#include "../World.h"
-#include "zlib.h"
-#include "../BlockID.h"
-#include "../BlockEntities/ChestEntity.h"
-#include "../BlockEntities/DispenserEntity.h"
-#include "../BlockEntities/DropperEntity.h"
-#include "../BlockEntities/FurnaceEntity.h"
-#include "../BlockEntities/HopperEntity.h"
-#include "../BlockEntities/JukeboxEntity.h"
-#include "../BlockEntities/NoteEntity.h"
-#include "../BlockEntities/SignEntity.h"
-#include "../Item.h"
-#include "../ItemGrid.h"
-#include "../StringCompression.h"
-#include "FastNBT.h"
-#include "../Mobs/Monster.h"
-#include "../Entities/Boat.h"
-#include "../Entities/FallingBlock.h"
-#include "../Entities/Minecart.h"
-#include "../Entities/Pickup.h"
-#include "../Entities/ProjectileEntity.h"
-
-
-
-
-
-/** If defined, the BlockSkyLight values will be copied over to BlockLight upon chunk saving,
-thus making skylight visible in Minutor's Lighting mode
-*/
-// #define DEBUG_SKYLIGHT
-
-/** Maximum number of MCA files that are cached in memory.
-Since only the header is actually kept in memory, this number can be high; still, each file holds an open OS filesystem handle.
-*/
-#define MAX_MCA_FILES 32
-
-/// The maximum size of an inflated chunk; raw chunk data is 192 KiB, allow 64 KiB more for entities
-#define CHUNK_INFLATE_MAX 256 KiB
-
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cWSSAnvil:
-
-cWSSAnvil::cWSSAnvil(cWorld * a_World) :
- super(a_World)
-{
- // Create a level.dat file for mapping tools, if it doesn't already exist:
- AString fnam;
- Printf(fnam, "%s/level.dat", a_World->GetName().c_str());
- if (!cFile::Exists(fnam))
- {
- cFastNBTWriter Writer;
- Writer.BeginCompound("");
- Writer.AddInt("SpawnX", (int)(a_World->GetSpawnX()));
- Writer.AddInt("SpawnY", (int)(a_World->GetSpawnY()));
- Writer.AddInt("SpawnZ", (int)(a_World->GetSpawnZ()));
- Writer.EndCompound();
- Writer.Finish();
-
- #ifdef _DEBUG
- cParsedNBT TestParse(Writer.GetResult().data(), Writer.GetResult().size());
- ASSERT(TestParse.IsValid());
- #endif // _DEBUG
-
-		gzFile gz = gzopen((FILE_IO_PREFIX + fnam).c_str(), "wb");
-		if (gz != NULL)
-		{
-			gzwrite(gz, Writer.GetResult().data(), Writer.GetResult().size());
-			gzclose(gz);
-		}
- }
-}
-
-
-
-
-
-cWSSAnvil::~cWSSAnvil()
-{
- cCSLock Lock(m_CS);
- for (cMCAFiles::iterator itr = m_Files.begin(); itr != m_Files.end(); ++itr)
- {
- delete *itr;
- } // for itr - m_Files[]
-}
-
-
-
-
-
-bool cWSSAnvil::LoadChunk(const cChunkCoords & a_Chunk)
-{
- AString ChunkData;
- if (!GetChunkData(a_Chunk, ChunkData))
- {
- // The reason for failure is already printed in GetChunkData()
- return false;
- }
-
- return LoadChunkFromData(a_Chunk, ChunkData);
-}
-
-
-
-
-
-bool cWSSAnvil::SaveChunk(const cChunkCoords & a_Chunk)
-{
- AString ChunkData;
- if (!SaveChunkToData(a_Chunk, ChunkData))
- {
- LOGWARNING("Cannot serialize chunk [%d, %d] into data", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
- return false;
- }
- if (!SetChunkData(a_Chunk, ChunkData))
- {
- LOGWARNING("Cannot store chunk [%d, %d] data", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
- return false;
- }
-
- // Everything successful
- return true;
-}
-
-
-
-
-
-bool cWSSAnvil::GetChunkData(const cChunkCoords & a_Chunk, AString & a_Data)
-{
- cCSLock Lock(m_CS);
- cMCAFile * File = LoadMCAFile(a_Chunk);
- if (File == NULL)
- {
- return false;
- }
- return File->GetChunkData(a_Chunk, a_Data);
-}
-
-
-
-
-
-bool cWSSAnvil::SetChunkData(const cChunkCoords & a_Chunk, const AString & a_Data)
-{
- cCSLock Lock(m_CS);
- cMCAFile * File = LoadMCAFile(a_Chunk);
- if (File == NULL)
- {
- return false;
- }
- return File->SetChunkData(a_Chunk, a_Data);
-}
-
-
-
-
-
-cWSSAnvil::cMCAFile * cWSSAnvil::LoadMCAFile(const cChunkCoords & a_Chunk)
-{
- // ASSUME m_CS is locked
- ASSERT(m_CS.IsLocked());
-
- const int RegionX = FAST_FLOOR_DIV(a_Chunk.m_ChunkX, 32);
- const int RegionZ = FAST_FLOOR_DIV(a_Chunk.m_ChunkZ, 32);
- ASSERT(a_Chunk.m_ChunkX - RegionX * 32 >= 0);
- ASSERT(a_Chunk.m_ChunkZ - RegionZ * 32 >= 0);
- ASSERT(a_Chunk.m_ChunkX - RegionX * 32 < 32);
- ASSERT(a_Chunk.m_ChunkZ - RegionZ * 32 < 32);
-
- // Is it already cached?
- for (cMCAFiles::iterator itr = m_Files.begin(); itr != m_Files.end(); ++itr)
- {
- if (((*itr) != NULL) && ((*itr)->GetRegionX() == RegionX) && ((*itr)->GetRegionZ() == RegionZ))
- {
- // Move the file to front and return it:
- cMCAFile * f = *itr;
- if (itr != m_Files.begin())
- {
- m_Files.erase(itr);
- m_Files.push_front(f);
- }
- return f;
- }
- }
-
- // Load it anew:
- AString FileName;
- Printf(FileName, "%s/region", m_World->GetName().c_str());
- cFile::CreateFolder(FILE_IO_PREFIX + FileName);
- AppendPrintf(FileName, "/r.%d.%d.mca", RegionX, RegionZ);
- cMCAFile * f = new cMCAFile(FileName, RegionX, RegionZ);
- if (f == NULL)
- {
- return NULL;
- }
- m_Files.push_front(f);
-
- // If there are too many MCA files cached, delete the last one used:
- if (m_Files.size() > MAX_MCA_FILES)
- {
- delete m_Files.back();
- m_Files.pop_back();
- }
- return f;
-}
-
-
-
-
-
-bool cWSSAnvil::LoadChunkFromData(const cChunkCoords & a_Chunk, const AString & a_Data)
-{
- // Decompress the data:
- char Uncompressed[CHUNK_INFLATE_MAX];
- z_stream strm;
- strm.zalloc = (alloc_func)NULL;
- strm.zfree = (free_func)NULL;
- strm.opaque = NULL;
- inflateInit(&strm);
- strm.next_out = (Bytef *)Uncompressed;
- strm.avail_out = sizeof(Uncompressed);
- strm.next_in = (Bytef *)a_Data.data();
- strm.avail_in = a_Data.size();
- int res = inflate(&strm, Z_FINISH);
- inflateEnd(&strm);
- if (res != Z_STREAM_END)
- {
- return false;
- }
-
- // Parse the NBT data:
- cParsedNBT NBT(Uncompressed, strm.total_out);
- if (!NBT.IsValid())
- {
- // NBT Parsing failed
- return false;
- }
-
- // Load the data from NBT:
- return LoadChunkFromNBT(a_Chunk, NBT);
-}
-
-
-
-
-
-bool cWSSAnvil::SaveChunkToData(const cChunkCoords & a_Chunk, AString & a_Data)
-{
- cFastNBTWriter Writer;
- if (!SaveChunkToNBT(a_Chunk, Writer))
- {
- LOGWARNING("Cannot save chunk [%d, %d] to NBT", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
- return false;
- }
- Writer.Finish();
-
- CompressString(Writer.GetResult().data(), Writer.GetResult().size(), a_Data);
- return true;
-}
-
-
-
-
-
-bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT & a_NBT)
-{
- // The data arrays, in MCA-native y/z/x ordering (will be reordered for the final chunk data)
- cChunkDef::BlockTypes BlockTypes;
- cChunkDef::BlockNibbles MetaData;
- cChunkDef::BlockNibbles BlockLight;
- cChunkDef::BlockNibbles SkyLight;
-
- memset(BlockTypes, E_BLOCK_AIR, sizeof(BlockTypes));
- memset(MetaData, 0, sizeof(MetaData));
- memset(SkyLight, 0xff, sizeof(SkyLight)); // By default, data not present in the NBT means air, which means full skylight
- memset(BlockLight, 0x00, sizeof(BlockLight));
-
- // Load the blockdata, blocklight and skylight:
- int Level = a_NBT.FindChildByName(0, "Level");
- if (Level < 0)
- {
- return false;
- }
- int Sections = a_NBT.FindChildByName(Level, "Sections");
- if ((Sections < 0) || (a_NBT.GetType(Sections) != TAG_List) || (a_NBT.GetChildrenType(Sections) != TAG_Compound))
- {
- return false;
- }
- for (int Child = a_NBT.GetFirstChild(Sections); Child >= 0; Child = a_NBT.GetNextSibling(Child))
- {
- int y = 0;
- int SectionY = a_NBT.FindChildByName(Child, "Y");
- if ((SectionY < 0) || (a_NBT.GetType(SectionY) != TAG_Byte))
- {
- continue;
- }
- y = a_NBT.GetByte(SectionY);
- if ((y < 0) || (y > 15))
- {
- continue;
- }
- CopyNBTData(a_NBT, Child, "Blocks", (char *)&(BlockTypes[y * 4096]), 4096);
- CopyNBTData(a_NBT, Child, "Data", (char *)&(MetaData[y * 2048]), 2048);
- CopyNBTData(a_NBT, Child, "SkyLight", (char *)&(SkyLight[y * 2048]), 2048);
- CopyNBTData(a_NBT, Child, "BlockLight", (char *)&(BlockLight[y * 2048]), 2048);
- } // for itr - LevelSections[]
-
- // Load the biomes from NBT, if present and valid. First try MCS-style, then Vanilla-style:
- cChunkDef::BiomeMap BiomeMap;
- cChunkDef::BiomeMap * Biomes = LoadBiomeMapFromNBT(&BiomeMap, a_NBT, a_NBT.FindChildByName(Level, "MCSBiomes"));
- if (Biomes == NULL)
- {
- // MCS-style biomes not available, load vanilla-style:
- Biomes = LoadVanillaBiomeMapFromNBT(&BiomeMap, a_NBT, a_NBT.FindChildByName(Level, "Biomes"));
- }
-
- // Load the entities from NBT:
- cEntityList Entities;
- cBlockEntityList BlockEntities;
- LoadEntitiesFromNBT (Entities, a_NBT, a_NBT.FindChildByName(Level, "Entities"));
- LoadBlockEntitiesFromNBT(BlockEntities, a_NBT, a_NBT.FindChildByName(Level, "TileEntities"), BlockTypes, MetaData);
-
- bool IsLightValid = (a_NBT.FindChildByName(Level, "MCSIsLightValid") > 0);
-
- /*
- // Uncomment this block for really cool stuff :)
- // DEBUG magic: Invert the underground, so that we can see the MC generator in action :)
- bool ShouldInvert[cChunkDef::Width * cChunkDef::Width];
- memset(ShouldInvert, 0, sizeof(ShouldInvert));
- for (int y = cChunkDef::Height - 1; y >= 0; y--)
- {
- for (int x = 0; x < cChunkDef::Width; x++) for (int z = 0; z < cChunkDef::Width; z++)
- {
- int Index = cChunkDef::MakeIndexNoCheck(x, y, z);
- if (ShouldInvert[x + cChunkDef::Width * z])
- {
- BlockTypes[Index] = (BlockTypes[Index] == E_BLOCK_AIR) ? E_BLOCK_STONE : E_BLOCK_AIR;
- }
- else
- {
- switch (BlockTypes[Index])
- {
- case E_BLOCK_AIR:
- case E_BLOCK_LEAVES:
- {
- // nothing needed
- break;
- }
- default:
- {
- ShouldInvert[x + cChunkDef::Width * z] = true;
- }
- }
- BlockTypes[Index] = E_BLOCK_AIR;
- }
- }
- } // for y
- //*/
-
- m_World->SetChunkData(
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ,
- BlockTypes, MetaData,
- IsLightValid ? BlockLight : NULL,
- IsLightValid ? SkyLight : NULL,
- NULL, Biomes,
- Entities, BlockEntities,
- false
- );
- return true;
-}
-
-
-
-
-void cWSSAnvil::CopyNBTData(const cParsedNBT & a_NBT, int a_Tag, const AString & a_ChildName, char * a_Destination, int a_Length)
-{
- int Child = a_NBT.FindChildByName(a_Tag, a_ChildName);
- if ((Child >= 0) && (a_NBT.GetType(Child) == TAG_ByteArray) && (a_NBT.GetDataLength(Child) == a_Length))
- {
- memcpy(a_Destination, a_NBT.GetData(Child), a_Length);
- }
-}
-
-
-
-
-
-bool cWSSAnvil::SaveChunkToNBT(const cChunkCoords & a_Chunk, cFastNBTWriter & a_Writer)
-{
- a_Writer.BeginCompound("Level");
- a_Writer.AddInt("xPos", a_Chunk.m_ChunkX);
- a_Writer.AddInt("zPos", a_Chunk.m_ChunkZ);
- cNBTChunkSerializer Serializer(a_Writer);
- if (!m_World->GetChunkData(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, Serializer))
- {
- LOGWARNING("Cannot get chunk [%d, %d] data for NBT saving", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
- return false;
- }
- Serializer.Finish(); // Close NBT tags
-
- // Save biomes, both MCS (IntArray) and MC-vanilla (ByteArray):
- if (Serializer.m_BiomesAreValid)
- {
- a_Writer.AddByteArray("Biomes", (const char *)(Serializer.m_VanillaBiomes), ARRAYCOUNT(Serializer.m_VanillaBiomes));
- a_Writer.AddIntArray ("MCSBiomes", (const int *)(Serializer.m_Biomes), ARRAYCOUNT(Serializer.m_Biomes));
- }
-
- // Save blockdata:
- a_Writer.BeginList("Sections", TAG_Compound);
- int SliceSizeBlock = cChunkDef::Width * cChunkDef::Width * 16;
- int SliceSizeNibble = SliceSizeBlock / 2;
- const char * BlockTypes = (const char *)(Serializer.m_BlockTypes);
- const char * BlockMetas = (const char *)(Serializer.m_BlockMetas);
- #ifdef DEBUG_SKYLIGHT
- const char * BlockLight = (const char *)(Serializer.m_BlockSkyLight);
- #else
- const char * BlockLight = (const char *)(Serializer.m_BlockLight);
- #endif
- const char * BlockSkyLight = (const char *)(Serializer.m_BlockSkyLight);
- for (int Y = 0; Y < 16; Y++)
- {
- a_Writer.BeginCompound("");
- a_Writer.AddByteArray("Blocks", BlockTypes + Y * SliceSizeBlock, SliceSizeBlock);
- a_Writer.AddByteArray("Data", BlockMetas + Y * SliceSizeNibble, SliceSizeNibble);
- a_Writer.AddByteArray("SkyLight", BlockSkyLight + Y * SliceSizeNibble, SliceSizeNibble);
- a_Writer.AddByteArray("BlockLight", BlockLight + Y * SliceSizeNibble, SliceSizeNibble);
- a_Writer.AddByte("Y", (unsigned char)Y);
- a_Writer.EndCompound();
- }
- a_Writer.EndList(); // "Sections"
-
- // Store the information that the lighting is valid.
-	// For compatibility reasons, the default is "invalid" (missing) - this means older data is re-lighted upon loading.
- if (Serializer.IsLightValid())
- {
- a_Writer.AddByte("MCSIsLightValid", 1);
- }
-
- a_Writer.EndCompound(); // "Level"
- return true;
-}
-
-
-
-
-
-cChunkDef::BiomeMap * cWSSAnvil::LoadVanillaBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_ByteArray))
- {
- return NULL;
- }
- if (a_NBT.GetDataLength(a_TagIdx) != 16 * 16)
- {
- // The biomes stored don't match in size
- return NULL;
- }
- const unsigned char * VanillaBiomeData = (const unsigned char *)(a_NBT.GetData(a_TagIdx));
- for (int i = 0; i < ARRAYCOUNT(*a_BiomeMap); i++)
- {
- if ((VanillaBiomeData)[i] == 0xff)
- {
- // Unassigned biomes
- return NULL;
- }
- (*a_BiomeMap)[i] = (EMCSBiome)(VanillaBiomeData[i]);
- }
- return a_BiomeMap;
-}
-
-
-
-
-
-cChunkDef::BiomeMap * cWSSAnvil::LoadBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_IntArray))
- {
- return NULL;
- }
- if (a_NBT.GetDataLength(a_TagIdx) != sizeof(*a_BiomeMap))
- {
- // The biomes stored don't match in size
- return NULL;
- }
- const int * BiomeData = (const int *)(a_NBT.GetData(a_TagIdx));
- for (int i = 0; i < ARRAYCOUNT(*a_BiomeMap); i++)
- {
- (*a_BiomeMap)[i] = (EMCSBiome)(ntohl(BiomeData[i]));
- if ((*a_BiomeMap)[i] == 0xff)
- {
- // Unassigned biomes
- return NULL;
- }
- }
- return a_BiomeMap;
-}
-
-
-
-
-
-void cWSSAnvil::LoadEntitiesFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_List))
- {
- return;
- }
-
- for (int Child = a_NBT.GetFirstChild(a_TagIdx); Child != -1; Child = a_NBT.GetNextSibling(Child))
- {
- if (a_NBT.GetType(Child) != TAG_Compound)
- {
- continue;
- }
- int sID = a_NBT.FindChildByName(Child, "id");
- if (sID < 0)
- {
- continue;
- }
- LoadEntityFromNBT(a_Entities, a_NBT, Child, a_NBT.GetData(sID), a_NBT.GetDataLength(sID));
- } // for Child - a_NBT[]
-}
-
-
-
-
-
-void cWSSAnvil::LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx, BLOCKTYPE * a_BlockTypes, NIBBLETYPE * a_BlockMetas)
-{
- if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_List))
- {
- return;
- }
-
- for (int Child = a_NBT.GetFirstChild(a_TagIdx); Child != -1; Child = a_NBT.GetNextSibling(Child))
- {
- if (a_NBT.GetType(Child) != TAG_Compound)
- {
- continue;
- }
- int sID = a_NBT.FindChildByName(Child, "id");
- if (sID < 0)
- {
- continue;
- }
- if (strncmp(a_NBT.GetData(sID), "Chest", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadChestFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "Dropper", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadDropperFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "Furnace", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadFurnaceFromNBT(a_BlockEntities, a_NBT, Child, a_BlockTypes, a_BlockMetas);
- }
- else if (strncmp(a_NBT.GetData(sID), "Hopper", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadHopperFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "Music", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadNoteFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "RecordPlayer", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadJukeboxFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "Sign", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadSignFromNBT(a_BlockEntities, a_NBT, Child);
- }
- else if (strncmp(a_NBT.GetData(sID), "Trap", a_NBT.GetDataLength(sID)) == 0)
- {
- LoadDispenserFromNBT(a_BlockEntities, a_NBT, Child);
- }
- // TODO: Other block entities
- } // for Child - tag children
-}
-
-
-
-
-
-bool cWSSAnvil::LoadItemFromNBT(cItem & a_Item, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- int ID = a_NBT.FindChildByName(a_TagIdx, "id");
- if ((ID < 0) || (a_NBT.GetType(ID) != TAG_Short))
- {
- return false;
- }
- a_Item.m_ItemType = (ENUM_ITEM_ID)(a_NBT.GetShort(ID));
-
- int Damage = a_NBT.FindChildByName(a_TagIdx, "Damage");
- if ((Damage < 0) || (a_NBT.GetType(Damage) != TAG_Short))
- {
- return false;
- }
- a_Item.m_ItemDamage = a_NBT.GetShort(Damage);
-
- int Count = a_NBT.FindChildByName(a_TagIdx, "Count");
- if ((Count < 0) || (a_NBT.GetType(Count) != TAG_Byte))
- {
- return false;
- }
- a_Item.m_ItemCount = a_NBT.GetByte(Count);
-
- // Find the "tag" tag, used for enchantments and other extra data
- int TagTag = a_NBT.FindChildByName(a_TagIdx, "tag");
- if (TagTag <= 0)
- {
- // No extra data
- return true;
- }
-
- // Load enchantments:
- const char * EnchName = (a_Item.m_ItemType == E_ITEM_BOOK) ? "StoredEnchantments" : "ench";
- int EnchTag = a_NBT.FindChildByName(TagTag, EnchName);
- if (EnchTag > 0)
- {
- a_Item.m_Enchantments.ParseFromNBT(a_NBT, EnchTag);
- }
-
- return true;
-}
-
-
-
-
-
-void cWSSAnvil::LoadItemGridFromNBT(cItemGrid & a_ItemGrid, const cParsedNBT & a_NBT, int a_ItemsTagIdx, int a_SlotOffset)
-{
- int NumSlots = a_ItemGrid.GetNumSlots();
- for (int Child = a_NBT.GetFirstChild(a_ItemsTagIdx); Child != -1; Child = a_NBT.GetNextSibling(Child))
- {
- int SlotTag = a_NBT.FindChildByName(Child, "Slot");
- if ((SlotTag < 0) || (a_NBT.GetType(SlotTag) != TAG_Byte))
- {
- continue;
- }
- int SlotNum = (int)(a_NBT.GetByte(SlotTag)) - a_SlotOffset;
- if ((SlotNum < 0) || (SlotNum >= NumSlots))
- {
- // SlotNum outside of the range
- continue;
- }
- cItem Item;
- if (LoadItemFromNBT(Item, a_NBT, Child))
- {
- a_ItemGrid.SetSlot(SlotNum, Item);
- }
- } // for itr - ItemDefs[]
-}
-
-
-
-
-
-void cWSSAnvil::LoadChestFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty chest - the chunk loader will provide an empty cChestEntity for this
- }
- std::auto_ptr<cChestEntity> Chest(new cChestEntity(x, y, z, m_World));
- LoadItemGridFromNBT(Chest->GetContents(), a_NBT, Items);
- a_BlockEntities.push_back(Chest.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadDispenserFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty dispenser - the chunk loader will provide an empty cDispenserEntity for this
- }
- std::auto_ptr<cDispenserEntity> Dispenser(new cDispenserEntity(x, y, z, m_World));
- LoadItemGridFromNBT(Dispenser->GetContents(), a_NBT, Items);
- a_BlockEntities.push_back(Dispenser.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadDropperFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty dropper - the chunk loader will provide an empty cDropperEntity for this
- }
- std::auto_ptr<cDropperEntity> Dropper(new cDropperEntity(x, y, z, m_World));
- LoadItemGridFromNBT(Dropper->GetContents(), a_NBT, Items);
- a_BlockEntities.push_back(Dropper.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadFurnaceFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx, BLOCKTYPE * a_BlockTypes, NIBBLETYPE * a_BlockMetas)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty furnace - the chunk loader will provide an empty cFurnaceEntity for this
- }
-
- // Convert coords to relative:
- int RelX = x;
- int RelZ = z;
- int ChunkX, ChunkZ;
- cChunkDef::AbsoluteToRelative(RelX, y, RelZ, ChunkX, ChunkZ);
-
- // Create the furnace entity, with proper BlockType and BlockMeta info:
- BLOCKTYPE BlockType = cChunkDef::GetBlock(a_BlockTypes, RelX, y, RelZ);
- NIBBLETYPE BlockMeta = cChunkDef::GetNibble(a_BlockMetas, RelX, y, RelZ);
- std::auto_ptr<cFurnaceEntity> Furnace(new cFurnaceEntity(x, y, z, BlockType, BlockMeta, m_World));
-
- // Load slots:
- for (int Child = a_NBT.GetFirstChild(Items); Child != -1; Child = a_NBT.GetNextSibling(Child))
- {
- int Slot = a_NBT.FindChildByName(Child, "Slot");
- if ((Slot < 0) || (a_NBT.GetType(Slot) != TAG_Byte))
- {
- continue;
- }
- cItem Item;
- if (LoadItemFromNBT(Item, a_NBT, Child))
- {
- Furnace->SetSlot(a_NBT.GetByte(Slot), Item);
- }
- } // for Child - Items[]
-
- // Load burn time:
- int BurnTime = a_NBT.FindChildByName(a_TagIdx, "BurnTime");
- if (BurnTime >= 0)
- {
- Int16 bt = a_NBT.GetShort(BurnTime);
- // Anvil stores only the remaining burn time, not the fuel's total burn time; treat the stored value as the full burn time and restart the burn from zero
- Furnace->SetBurnTimes(bt, 0);
- }
-
- // Load cook time:
- int CookTime = a_NBT.FindChildByName(a_TagIdx, "CookTime");
- if (CookTime >= 0)
- {
- Int16 ct = a_NBT.GetShort(CookTime);
- // Anvil doesn't store the time that an item takes to cook. We simply use the default - 10 seconds (200 ticks)
- Furnace->SetCookTimes(200, ct);
- }
-
- // Restart cooking:
- Furnace->ContinueCooking();
- a_BlockEntities.push_back(Furnace.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadHopperFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty hopper - the chunk loader will provide an empty cHopperEntity for this
- }
- std::auto_ptr<cHopperEntity> Hopper(new cHopperEntity(x, y, z, m_World));
- LoadItemGridFromNBT(Hopper->GetContents(), a_NBT, Items);
- a_BlockEntities.push_back(Hopper.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadJukeboxFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- std::auto_ptr<cJukeboxEntity> Jukebox(new cJukeboxEntity(x, y, z, m_World));
- int Record = a_NBT.FindChildByName(a_TagIdx, "Record");
- if (Record >= 0)
- {
- Jukebox->SetRecord(a_NBT.GetInt(Record));
- }
- a_BlockEntities.push_back(Jukebox.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadNoteFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- std::auto_ptr<cNoteEntity> Note(new cNoteEntity(x, y, z, m_World));
- int note = a_NBT.FindChildByName(a_TagIdx, "note");
- if (note >= 0)
- {
- Note->SetPitch(a_NBT.GetByte(note));
- }
- a_BlockEntities.push_back(Note.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadSignFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- ASSERT(a_NBT.GetType(a_TagIdx) == TAG_Compound);
- int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, a_TagIdx, x, y, z))
- {
- return;
- }
- std::auto_ptr<cSignEntity> Sign(new cSignEntity(E_BLOCK_SIGN_POST, x, y, z, m_World));
-
- int currentLine = a_NBT.FindChildByName(a_TagIdx, "Text1");
- if (currentLine >= 0)
- {
- Sign->SetLine(0, a_NBT.GetString(currentLine));
- }
-
- currentLine = a_NBT.FindChildByName(a_TagIdx, "Text2");
- if (currentLine >= 0)
- {
- Sign->SetLine(1, a_NBT.GetString(currentLine));
- }
-
- currentLine = a_NBT.FindChildByName(a_TagIdx, "Text3");
- if (currentLine >= 0)
- {
- Sign->SetLine(2, a_NBT.GetString(currentLine));
- }
-
- currentLine = a_NBT.FindChildByName(a_TagIdx, "Text4");
- if (currentLine >= 0)
- {
- Sign->SetLine(3, a_NBT.GetString(currentLine));
- }
-
- a_BlockEntities.push_back(Sign.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadEntityFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_EntityTagIdx, const char * a_IDTag, int a_IDTagLength)
-{
- if (strncmp(a_IDTag, "Boat", a_IDTagLength) == 0)
- {
- LoadBoatFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "FallingBlock", a_IDTagLength) == 0)
- {
- LoadFallingBlockFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Minecart", a_IDTagLength) == 0)
- {
- // An old-style minecart; determine its specific type from the "Type" tag:
- int TypeTag = a_NBT.FindChildByName(a_EntityTagIdx, "Type");
- if ((TypeTag < 0) || (a_NBT.GetType(TypeTag) != TAG_Int))
- {
- return;
- }
- switch (a_NBT.GetInt(TypeTag))
- {
- case 0: LoadMinecartRFromNBT(a_Entities, a_NBT, a_EntityTagIdx); break; // Rideable minecart
- case 1: LoadMinecartCFromNBT(a_Entities, a_NBT, a_EntityTagIdx); break; // Minecart with chest
- case 2: LoadMinecartFFromNBT(a_Entities, a_NBT, a_EntityTagIdx); break; // Minecart with furnace
- case 3: LoadMinecartTFromNBT(a_Entities, a_NBT, a_EntityTagIdx); break; // Minecart with TNT
- case 4: LoadMinecartHFromNBT(a_Entities, a_NBT, a_EntityTagIdx); break; // Minecart with Hopper
- }
- }
- else if (strncmp(a_IDTag, "MinecartRideable", a_IDTagLength) == 0)
- {
- LoadMinecartRFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "MinecartChest", a_IDTagLength) == 0)
- {
- LoadMinecartCFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "MinecartFurnace", a_IDTagLength) == 0)
- {
- LoadMinecartFFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "MinecartTNT", a_IDTagLength) == 0)
- {
- LoadMinecartTFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "MinecartHopper", a_IDTagLength) == 0)
- {
- LoadMinecartHFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Item", a_IDTagLength) == 0)
- {
- LoadPickupFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Arrow", a_IDTagLength) == 0)
- {
- LoadArrowFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Snowball", a_IDTagLength) == 0)
- {
- LoadSnowballFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Egg", a_IDTagLength) == 0)
- {
- LoadEggFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "Fireball", a_IDTagLength) == 0)
- {
- LoadFireballFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "SmallFireball", a_IDTagLength) == 0)
- {
- LoadFireChargeFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- else if (strncmp(a_IDTag, "ThrownEnderpearl", a_IDTagLength) == 0)
- {
- LoadThrownEnderpearlFromNBT(a_Entities, a_NBT, a_EntityTagIdx);
- }
- // TODO: other entities
-}
-
-
-
-
-
-void cWSSAnvil::LoadBoatFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cBoat> Boat(new cBoat(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Boat.get(), a_NBT, a_TagIdx))
- {
- return;
- }
- a_Entities.push_back(Boat.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadFallingBlockFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- // TODO
-}
-
-
-
-
-
-void cWSSAnvil::LoadMinecartRFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cEmptyMinecart> Minecart(new cEmptyMinecart(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Minecart.get(), a_NBT, a_TagIdx))
- {
- return;
- }
- a_Entities.push_back(Minecart.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadMinecartCFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- int Items = a_NBT.FindChildByName(a_TagIdx, "Items");
- if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List))
- {
- return; // Make it an empty chest - the chunk loader will provide an empty cChestEntity for this
- }
- std::auto_ptr<cMinecartWithChest> Minecart(new cMinecartWithChest(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Minecart.get(), a_NBT, a_TagIdx))
- {
- return;
- }
- for (int Child = a_NBT.GetFirstChild(Items); Child != -1; Child = a_NBT.GetNextSibling(Child))
- {
- int Slot = a_NBT.FindChildByName(Child, "Slot");
- if ((Slot < 0) || (a_NBT.GetType(Slot) != TAG_Byte))
- {
- continue;
- }
- cItem Item;
- if (LoadItemFromNBT(Item, a_NBT, Child))
- {
- Minecart->SetSlot(a_NBT.GetByte(Slot), Item);
- }
- } // for Child - Items[]
- a_Entities.push_back(Minecart.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadMinecartFFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cMinecartWithFurnace> Minecart(new cMinecartWithFurnace(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Minecart.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // TODO: Load the Push and Fuel tags
-
- a_Entities.push_back(Minecart.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadMinecartTFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cMinecartWithTNT> Minecart(new cMinecartWithTNT(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Minecart.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // TODO: Everything to do with TNT carts
-
- a_Entities.push_back(Minecart.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadMinecartHFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cMinecartWithHopper> Minecart(new cMinecartWithHopper(0, 0, 0));
- if (!LoadEntityBaseFromNBT(*Minecart.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // TODO: Everything to do with hopper carts
-
- a_Entities.push_back(Minecart.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadPickupFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- int ItemTag = a_NBT.FindChildByName(a_TagIdx, "Item");
- if ((ItemTag < 0) || (a_NBT.GetType(ItemTag) != TAG_Compound))
- {
- return;
- }
- cItem Item;
- if (!LoadItemFromNBT(Item, a_NBT, ItemTag))
- {
- return;
- }
- std::auto_ptr<cPickup> Pickup(new cPickup(0, 0, 0, Item, false)); // Pickup delay doesn't matter, just say false
- if (!LoadEntityBaseFromNBT(*Pickup.get(), a_NBT, a_TagIdx))
- {
- return;
- }
- a_Entities.push_back(Pickup.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadArrowFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cArrowEntity> Arrow(new cArrowEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*Arrow.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Load pickup state:
- int PickupIdx = a_NBT.FindChildByName(a_TagIdx, "pickup");
- if (PickupIdx > 0)
- {
- Arrow->SetPickupState((cArrowEntity::ePickupState)a_NBT.GetByte(PickupIdx));
- }
- else
- {
- // Try the older "player" tag:
- int PlayerIdx = a_NBT.FindChildByName(a_TagIdx, "player");
- if (PlayerIdx > 0)
- {
- Arrow->SetPickupState((a_NBT.GetByte(PlayerIdx) == 0) ? cArrowEntity::psNoPickup : cArrowEntity::psInSurvivalOrCreative);
- }
- }
-
- // Load damage:
- int DamageIdx = a_NBT.FindChildByName(a_TagIdx, "damage");
- if (DamageIdx > 0)
- {
- Arrow->SetDamageCoeff(a_NBT.GetDouble(DamageIdx));
- }
-
- // Store the new arrow in the entities list:
- a_Entities.push_back(Arrow.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadSnowballFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cThrownSnowballEntity> Snowball(new cThrownSnowballEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*Snowball.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Store the new snowball in the entities list:
- a_Entities.push_back(Snowball.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadEggFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cThrownEggEntity> Egg(new cThrownEggEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*Egg.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Store the new egg in the entities list:
- a_Entities.push_back(Egg.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadFireballFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cGhastFireballEntity> Fireball(new cGhastFireballEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*Fireball.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Store the new fireball in the entities list:
- a_Entities.push_back(Fireball.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadFireChargeFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cFireChargeEntity> FireCharge(new cFireChargeEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*FireCharge.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Store the new FireCharge in the entities list:
- a_Entities.push_back(FireCharge.release());
-}
-
-
-
-
-
-void cWSSAnvil::LoadThrownEnderpearlFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- std::auto_ptr<cThrownEnderPearlEntity> Enderpearl(new cThrownEnderPearlEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0)));
- if (!LoadProjectileBaseFromNBT(*Enderpearl.get(), a_NBT, a_TagIdx))
- {
- return;
- }
-
- // Store the new enderpearl in the entities list:
- a_Entities.push_back(Enderpearl.release());
-}
-
-
-
-
-
-bool cWSSAnvil::LoadEntityBaseFromNBT(cEntity & a_Entity, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- double Pos[3];
- if (!LoadDoublesListFromNBT(Pos, 3, a_NBT, a_NBT.FindChildByName(a_TagIdx, "Pos")))
- {
- return false;
- }
- a_Entity.SetPosition(Pos[0], Pos[1], Pos[2]);
-
- double Speed[3];
- if (!LoadDoublesListFromNBT(Speed, 3, a_NBT, a_NBT.FindChildByName(a_TagIdx, "Motion")))
- {
- return false;
- }
- a_Entity.SetSpeed(Speed[0], Speed[1], Speed[2]);
-
- double Rotation[3];
- if (!LoadDoublesListFromNBT(Rotation, 2, a_NBT, a_NBT.FindChildByName(a_TagIdx, "Rotation")))
- {
- return false;
- }
- a_Entity.SetRotation(Rotation[0]);
- a_Entity.SetRoll (Rotation[1]);
-
- return true;
-}
-
-
-
-
-
-bool cWSSAnvil::LoadProjectileBaseFromNBT(cProjectileEntity & a_Entity, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- if (!LoadEntityBaseFromNBT(a_Entity, a_NBT, a_TagIdx))
- {
- return false;
- }
-
- bool IsInGround = false;
- int InGroundIdx = a_NBT.FindChildByName(a_TagIdx, "inGround");
- if (InGroundIdx > 0)
- {
- IsInGround = (a_NBT.GetByte(InGroundIdx) != 0);
- }
- a_Entity.SetIsInGround(IsInGround);
-
- // TODO: Load inTile, TileCoords
-
- return true;
-}
-
-
-
-
-
-bool cWSSAnvil::LoadDoublesListFromNBT(double * a_Doubles, int a_NumDoubles, const cParsedNBT & a_NBT, int a_TagIdx)
-{
- if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_List) || (a_NBT.GetChildrenType(a_TagIdx) != TAG_Double))
- {
- return false;
- }
- int idx = 0;
- for (int Tag = a_NBT.GetFirstChild(a_TagIdx); (Tag > 0) && (idx < a_NumDoubles); Tag = a_NBT.GetNextSibling(Tag), ++idx)
- {
- a_Doubles[idx] = a_NBT.GetDouble(Tag);
- } // for Tag - PosTag[]
- return (idx == a_NumDoubles); // Did we read enough doubles?
-}
-
-
-
-
-
-bool cWSSAnvil::GetBlockEntityNBTPos(const cParsedNBT & a_NBT, int a_TagIdx, int & a_X, int & a_Y, int & a_Z)
-{
- int x = a_NBT.FindChildByName(a_TagIdx, "x");
- if ((x < 0) || (a_NBT.GetType(x) != TAG_Int))
- {
- return false;
- }
- int y = a_NBT.FindChildByName(a_TagIdx, "y");
- if ((y < 0) || (a_NBT.GetType(y) != TAG_Int))
- {
- return false;
- }
- int z = a_NBT.FindChildByName(a_TagIdx, "z");
- if ((z < 0) || (a_NBT.GetType(z) != TAG_Int))
- {
- return false;
- }
- a_X = a_NBT.GetInt(x);
- a_Y = a_NBT.GetInt(y);
- a_Z = a_NBT.GetInt(z);
- return true;
-}
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cWSSAnvil::cMCAFile:
-
-cWSSAnvil::cMCAFile::cMCAFile(const AString & a_FileName, int a_RegionX, int a_RegionZ) :
- m_RegionX(a_RegionX),
- m_RegionZ(a_RegionZ),
- m_FileName(a_FileName)
-{
-}
-
-
-
-
-
-bool cWSSAnvil::cMCAFile::OpenFile(bool a_IsForReading)
-{
- if (m_File.IsOpen())
- {
- // Already open
- return true;
- }
-
- if (a_IsForReading)
- {
- if (!cFile::Exists(m_FileName))
- {
- // We want to read and the file doesn't exist. Fail.
- return false;
- }
- }
-
- if (!m_File.Open(m_FileName, cFile::fmReadWrite))
- {
- // The file failed to open
- return false;
- }
-
- // Load the header:
- if (m_File.Read(m_Header, sizeof(m_Header)) != sizeof(m_Header))
- {
- // Cannot read the header - perhaps the file has just been created?
- // Try writing a NULL header (both chunk offsets and timestamps):
- memset(m_Header, 0, sizeof(m_Header));
- if (
- (m_File.Write(m_Header, sizeof(m_Header)) != sizeof(m_Header)) || // Real header - chunk offsets
- (m_File.Write(m_Header, sizeof(m_Header)) != sizeof(m_Header)) // Bogus data for the chunk timestamps
- )
- {
- LOGWARNING("Cannot process MCA header in file \"%s\", chunks in that file will be lost", m_FileName.c_str());
- m_File.Close();
- return false;
- }
- }
- return true;
-}
-
-
-
-
-
-bool cWSSAnvil::cMCAFile::GetChunkData(const cChunkCoords & a_Chunk, AString & a_Data)
-{
- if (!OpenFile(true))
- {
- return false;
- }
-
- int LocalX = a_Chunk.m_ChunkX % 32;
- if (LocalX < 0)
- {
- LocalX = 32 + LocalX;
- }
- int LocalZ = a_Chunk.m_ChunkZ % 32;
- if (LocalZ < 0)
- {
- LocalZ = 32 + LocalZ;
- }
- unsigned ChunkLocation = ntohl(m_Header[LocalX + 32 * LocalZ]);
- unsigned ChunkOffset = ChunkLocation >> 8;
-
- m_File.Seek(ChunkOffset * 4096);
-
- int ChunkSize = 0;
- if (m_File.Read(&ChunkSize, 4) != 4)
- {
- return false;
- }
- ChunkSize = ntohl(ChunkSize);
- char CompressionType = 0;
- if (m_File.Read(&CompressionType, 1) != 1)
- {
- return false;
- }
- if (CompressionType != 2)
- {
- // Chunk is in an unknown compression
- return false;
- }
- ChunkSize--;
-
- // HACK: This depends on the internal knowledge that AString's data() function returns the internal buffer directly
- a_Data.assign(ChunkSize, '\0');
- return (m_File.Read((void *)a_Data.data(), ChunkSize) == ChunkSize);
-}
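
The record parsed above is the standard Anvil region layout: each chunk's data starts on a 4 KiB sector boundary with a big-endian 4-byte length (which includes the compression-type byte) followed by a 1-byte compression type, where 2 means zlib. A minimal standalone sketch of decoding that 5-byte record header, assuming the raw bytes are already in memory; the struct and function names are illustrative and not part of this codebase:

```cpp
#include <cstddef>
#include <cstdint>

// Decoded form of the 5-byte record header that precedes each chunk's
// compressed data in an .mca file.
struct sMcaRecordHeader
{
	uint32_t m_PayloadSize;       // Number of compressed bytes following the 5-byte header
	unsigned char m_Compression;  // 2 = zlib; anything else is rejected, as in GetChunkData() above
};

// Returns false if the buffer is too short, the stored length is zero,
// or the compression type isn't the zlib (2) scheme handled here.
static bool DecodeMcaRecordHeader(const unsigned char * a_Raw, size_t a_Size, sMcaRecordHeader & a_Out)
{
	if ((a_Raw == NULL) || (a_Size < 5))
	{
		return false;
	}
	// The on-disk length is big-endian and counts the compression-type byte as well:
	uint32_t Length =
		(static_cast<uint32_t>(a_Raw[0]) << 24) |
		(static_cast<uint32_t>(a_Raw[1]) << 16) |
		(static_cast<uint32_t>(a_Raw[2]) << 8) |
		 static_cast<uint32_t>(a_Raw[3]);
	if (Length < 1)
	{
		return false;
	}
	a_Out.m_Compression = a_Raw[4];
	a_Out.m_PayloadSize = Length - 1;  // Same adjustment as the ChunkSize-- above
	return (a_Out.m_Compression == 2);
}
```
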
-
-
-
-
-
-bool cWSSAnvil::cMCAFile::SetChunkData(const cChunkCoords & a_Chunk, const AString & a_Data)
-{
- if (!OpenFile(false))
- {
- LOGWARNING("Cannot save chunk [%d, %d], opening file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
-
- int LocalX = a_Chunk.m_ChunkX % 32;
- if (LocalX < 0)
- {
- LocalX = 32 + LocalX;
- }
- int LocalZ = a_Chunk.m_ChunkZ % 32;
- if (LocalZ < 0)
- {
- LocalZ = 32 + LocalZ;
- }
-
- unsigned ChunkSector = FindFreeLocation(LocalX, LocalZ, a_Data);
-
- // Store the chunk data:
- m_File.Seek(ChunkSector * 4096);
- unsigned ChunkSize = htonl(a_Data.size() + 1);
- if (m_File.Write(&ChunkSize, 4) != 4)
- {
- LOGWARNING("Cannot save chunk [%d, %d], writing(1) data to file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
- char CompressionType = 2;
- if (m_File.Write(&CompressionType, 1) != 1)
- {
- LOGWARNING("Cannot save chunk [%d, %d], writing(2) data to file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
- if (m_File.Write(a_Data.data(), a_Data.size()) != (int)(a_Data.size()))
- {
- LOGWARNING("Cannot save chunk [%d, %d], writing(3) data to file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
-
- // Store the header:
- ChunkSize = (a_Data.size() + MCA_CHUNK_HEADER_LENGTH + 4095) / 4096; // Round data size *up* to nearest 4KB sector, make it a sector number
- ASSERT(ChunkSize < 256);
- m_Header[LocalX + 32 * LocalZ] = htonl((ChunkSector << 8) | ChunkSize);
- if (m_File.Seek(0) < 0)
- {
- LOGWARNING("Cannot save chunk [%d, %d], seeking in file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
- if (m_File.Write(m_Header, sizeof(m_Header)) != sizeof(m_Header))
- {
- LOGWARNING("Cannot save chunk [%d, %d], writing header to file \"%s\" failed", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, GetFileName().c_str());
- return false;
- }
-
- return true;
-}
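
Both GetChunkData() and SetChunkData() treat each m_Header entry as (SectorOffset << 8) | SectorCount, with both fields counted in 4 KiB sectors; the ASSERT(ChunkSize < 256) above exists because the sector count has to fit into that single low byte. A small self-contained sketch of the packing, assuming the big-endian conversion (htonl/ntohl in the real code) has already been applied; the helper names are illustrative:

```cpp
#include <cstdint>

// One entry of the MCA location table, in host byte order:
// top 3 bytes = sector offset of the chunk record, low byte = sector count.
static uint32_t PackLocation(uint32_t a_SectorOffset, uint32_t a_SectorCount)
{
	return (a_SectorOffset << 8) | (a_SectorCount & 0xffU);
}

static uint32_t UnpackSectorOffset(uint32_t a_Entry)
{
	return (a_Entry >> 8);
}

static uint32_t UnpackSectorCount(uint32_t a_Entry)
{
	return (a_Entry & 0xffU);
}

// Byte offset of the chunk record within the region file (sectors are 4 KiB):
static uint64_t RecordFileOffset(uint32_t a_Entry)
{
	return static_cast<uint64_t>(UnpackSectorOffset(a_Entry)) * 4096;
}
```
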
-
-
-
-
-
-unsigned cWSSAnvil::cMCAFile::FindFreeLocation(int a_LocalX, int a_LocalZ, const AString & a_Data)
-{
- // See if it fits the current location:
- unsigned ChunkLocation = ntohl(m_Header[a_LocalX + 32 * a_LocalZ]);
- unsigned ChunkLen = ChunkLocation & 0xff;
- if (a_Data.size() + MCA_CHUNK_HEADER_LENGTH <= (ChunkLen * 4096))
- {
- return ChunkLocation >> 8;
- }
-
- // Doesn't fit, append to the end of file (we're wasting a lot of space, TODO: fix this later)
- unsigned MaxLocation = 2 << 8; // Minimum sector is #2 - after the headers
- for (int i = 0; i < ARRAYCOUNT(m_Header); i++)
- {
- ChunkLocation = ntohl(m_Header[i]);
- ChunkLocation = ChunkLocation + ((ChunkLocation & 0xff) << 8); // Add the number of sectors used; don't care about the 4th byte
- if (MaxLocation < ChunkLocation)
- {
- MaxLocation = ChunkLocation;
- }
- } // for i - m_Header[]
- return MaxLocation >> 8;
-}
-
-
-
-
diff --git a/source/WorldStorage/WSSAnvil.h b/source/WorldStorage/WSSAnvil.h
deleted file mode 100644
index 7685d2236..000000000
--- a/source/WorldStorage/WSSAnvil.h
+++ /dev/null
@@ -1,184 +0,0 @@
-
-// WSSAnvil.h
-
-// Interfaces to the cWSSAnvil class representing the Anvil world storage scheme
-
-
-
-
-#pragma once
-
-#include "WorldStorage.h"
-#include "FastNBT.h"
-
-
-
-
-
-// fwd: ItemGrid.h
-class cItemGrid;
-
-class cProjectileEntity;
-
-
-
-
-
-enum
-{
- /// Maximum number of chunks in an MCA file - also the count of the header items
- MCA_MAX_CHUNKS = 32 * 32,
-
- /// The MCA header is 8 KiB
- MCA_HEADER_SIZE = MCA_MAX_CHUNKS * 8,
-
- /// There are 5 bytes of header in front of each chunk
- MCA_CHUNK_HEADER_LENGTH = 5,
-} ;
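
Each of the MCA_MAX_CHUNKS header slots corresponds to one chunk of a 32 x 32 region; the .cpp above derives the slot as LocalX + 32 * LocalZ with a floored modulo so that negative chunk coordinates wrap into 0..31. A standalone sketch of that mapping, with an illustrative function name:

```cpp
// Maps absolute chunk coords to the chunk's slot in the 1024-entry MCA header
// (0 .. MCA_MAX_CHUNKS - 1), mirroring the LocalX / LocalZ computation in
// cWSSAnvil::cMCAFile::GetChunkData() and SetChunkData().
static int McaHeaderIndex(int a_ChunkX, int a_ChunkZ)
{
	int LocalX = a_ChunkX % 32;
	if (LocalX < 0)
	{
		LocalX += 32;
	}
	int LocalZ = a_ChunkZ % 32;
	if (LocalZ < 0)
	{
		LocalZ += 32;
	}
	return LocalX + 32 * LocalZ;
}
```
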
-
-
-
-
-
-class cWSSAnvil :
- public cWSSchema
-{
- typedef cWSSchema super;
-
-public:
-
- cWSSAnvil(cWorld * a_World);
- virtual ~cWSSAnvil();
-
-protected:
-
- class cMCAFile
- {
- public:
-
- cMCAFile(const AString & a_FileName, int a_RegionX, int a_RegionZ);
-
- bool GetChunkData (const cChunkCoords & a_Chunk, AString & a_Data);
- bool SetChunkData (const cChunkCoords & a_Chunk, const AString & a_Data);
- bool EraseChunkData(const cChunkCoords & a_Chunk);
-
- int GetRegionX (void) const {return m_RegionX; }
- int GetRegionZ (void) const {return m_RegionZ; }
- const AString & GetFileName(void) const {return m_FileName; }
-
- protected:
-
- int m_RegionX;
- int m_RegionZ;
- cFile m_File;
- AString m_FileName;
-
- // The header, copied from the file so we don't have to seek to it all the time
- // Each of the 1024 entries is a chunk location: a 3-byte sector offset plus a 1-byte sector count
- unsigned m_Header[MCA_MAX_CHUNKS];
-
- // Chunk timestamps, following the chunk headers, are unused by MCS
-
- /// Finds a free location large enough to hold a_Data. Tries the chunk's current location (given by the local coords) first; if the data doesn't fit there, appends it to the end of the file. Returns the sector number.
- unsigned FindFreeLocation(int a_LocalX, int a_LocalZ, const AString & a_Data);
-
- /// Opens a MCA file either for a Read operation (fails if doesn't exist) or for a Write operation (creates new if not found)
- bool OpenFile(bool a_IsForReading);
- } ;
- typedef std::list<cMCAFile *> cMCAFiles;
-
- cCriticalSection m_CS;
- cMCAFiles m_Files; // An MRU cache of MCA files
-
- /// Gets chunk data from the correct file; locks file CS as needed
- bool GetChunkData(const cChunkCoords & a_Chunk, AString & a_Data);
-
- /// Sets chunk data into the correct file; locks file CS as needed
- bool SetChunkData(const cChunkCoords & a_Chunk, const AString & a_Data);
-
- /// Loads the chunk from the data (no locking needed)
- bool LoadChunkFromData(const cChunkCoords & a_Chunk, const AString & a_Data);
-
- /// Saves the chunk into datastream (no locking needed)
- bool SaveChunkToData(const cChunkCoords & a_Chunk, AString & a_Data);
-
- /// Loads the chunk from NBT data (no locking needed)
- bool LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT & a_NBT);
-
- /// Saves the chunk into NBT data using a_Writer; returns true on success
- bool SaveChunkToNBT(const cChunkCoords & a_Chunk, cFastNBTWriter & a_Writer);
-
- /// Loads the chunk's biome map from vanilla-format; returns a_BiomeMap if biomes present and valid, NULL otherwise
- cChunkDef::BiomeMap * LoadVanillaBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Loads the chunk's biome map from MCS format; returns a_BiomeMap if biomes present and valid, NULL otherwise
- cChunkDef::BiomeMap * LoadBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Loads the chunk's entities from NBT data (a_Tag is the Level\\Entities list tag; may be -1)
- void LoadEntitiesFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_Tag);
-
- /// Loads the chunk's BlockEntities from NBT data (a_Tag is the Level\\TileEntities list tag; may be -1)
- void LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_Tag, BLOCKTYPE * a_BlockTypes, NIBBLETYPE * a_BlockMetas);
-
- /// Loads a cItem contents from the specified NBT tag; returns true if successful. Doesn't load the Slot tag
- bool LoadItemFromNBT(cItem & a_Item, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /** Loads the contents of an Items[] list tag into a cItemGrid
- The ItemGrid's slot 0 corresponds to the specified slot offset in the tag
- Slots outside the ItemGrid's range are ignored
- */
- void LoadItemGridFromNBT(cItemGrid & a_ItemGrid, const cParsedNBT & a_NBT, int a_ItemsTagIdx, int a_SlotOffset = 0);
-
- void LoadChestFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadDispenserFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadDropperFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadFurnaceFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx, BLOCKTYPE * a_BlockTypes, NIBBLETYPE * a_BlockMetas);
- void LoadHopperFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadJukeboxFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadNoteFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadSignFromNBT (cBlockEntityList & a_BlockEntities, const cParsedNBT & a_NBT, int a_TagIdx);
-
- void LoadEntityFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_EntityTagIdx, const char * a_IDTag, int a_IDTagLength);
-
- void LoadBoatFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadFallingBlockFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadMinecartRFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadMinecartCFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadMinecartFFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadMinecartTFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadMinecartHFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadPickupFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadArrowFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadSnowballFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadEggFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadFireballFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadFireChargeFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadThrownEnderpearlFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Loads entity common data from the NBT compound; returns true if successful
- bool LoadEntityBaseFromNBT(cEntity & a_Entity, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Loads projectile common data from the NBT compound; returns true if successful
- bool LoadProjectileBaseFromNBT(cProjectileEntity & a_Entity, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Loads an array of doubles of the specified length from the specified NBT list tag a_TagIdx; returns true if successful
- bool LoadDoublesListFromNBT(double * a_Doubles, int a_NumDoubles, const cParsedNBT & a_NBT, int a_TagIdx);
-
- /// Helper function for extracting the X, Y, and Z int subtags of a NBT compound; returns true if successful
- bool GetBlockEntityNBTPos(const cParsedNBT & a_NBT, int a_TagIdx, int & a_X, int & a_Y, int & a_Z);
-
- /// Gets the correct MCA file either from cache or from disk, manages the m_Files cache; assumes m_CS is locked
- cMCAFile * LoadMCAFile(const cChunkCoords & a_Chunk);
-
- /// Copies a_Length bytes of data from the specified NBT Tag's Child into the a_Destination buffer
- void CopyNBTData(const cParsedNBT & a_NBT, int a_Tag, const AString & a_ChildName, char * a_Destination, int a_Length);
-
- // cWSSchema overrides:
- virtual bool LoadChunk(const cChunkCoords & a_Chunk) override;
- virtual bool SaveChunk(const cChunkCoords & a_Chunk) override;
- virtual const AString GetName(void) const override {return "anvil"; }
-} ;
-
-
-
-
diff --git a/source/WorldStorage/WSSCompact.cpp b/source/WorldStorage/WSSCompact.cpp
deleted file mode 100644
index 694f3ed1d..000000000
--- a/source/WorldStorage/WSSCompact.cpp
+++ /dev/null
@@ -1,1009 +0,0 @@
-
-// WSSCompact.cpp
-
- // Implements the cWSSCompact class representing the "compact" storage schema (PAK-files)
-
-#include "Globals.h"
-#include "WSSCompact.h"
-#include "../World.h"
-#include "zlib.h"
-#include <json/json.h>
-#include "../StringCompression.h"
-#include "../BlockEntities/ChestEntity.h"
-#include "../BlockEntities/DispenserEntity.h"
-#include "../BlockEntities/FurnaceEntity.h"
-#include "../BlockEntities/JukeboxEntity.h"
-#include "../BlockEntities/NoteEntity.h"
-#include "../BlockEntities/SignEntity.h"
-
-
-
-
-
-#pragma pack(push, 1)
-/// The chunk header, as stored in the file:
-struct cWSSCompact::sChunkHeader
-{
- int m_ChunkX;
- int m_ChunkZ;
- int m_CompressedSize;
- int m_UncompressedSize;
-} ;
-#pragma pack(pop)
-
-
-
-
-
-/// The maximum number of PAK files that are cached
-const int MAX_PAK_FILES = 16;
-
-/// The maximum number of unsaved chunks before the cPAKFile saves them to disk
-const int MAX_DIRTY_CHUNKS = 16;
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cJsonChunkSerializer:
-
-cJsonChunkSerializer::cJsonChunkSerializer(void) :
- m_HasJsonData(false)
-{
-}
-
-
-
-
-
-void cJsonChunkSerializer::Entity(cEntity * a_Entity)
-{
- // TODO: a_Entity->SaveToJson(m_Root);
-}
-
-
-
-
-
-void cJsonChunkSerializer::BlockEntity(cBlockEntity * a_BlockEntity)
-{
- const char * SaveInto = NULL;
- switch (a_BlockEntity->GetBlockType())
- {
- case E_BLOCK_CHEST: SaveInto = "Chests"; break;
- case E_BLOCK_DISPENSER: SaveInto = "Dispensers"; break;
- case E_BLOCK_DROPPER: SaveInto = "Droppers"; break;
- case E_BLOCK_FURNACE: SaveInto = "Furnaces"; break;
- case E_BLOCK_SIGN_POST: SaveInto = "Signs"; break;
- case E_BLOCK_WALLSIGN: SaveInto = "Signs"; break;
- case E_BLOCK_NOTE_BLOCK: SaveInto = "Notes"; break;
- case E_BLOCK_JUKEBOX: SaveInto = "Jukeboxes"; break;
-
- default:
- {
- ASSERT(!"Unhandled blocktype in BlockEntities list while saving to JSON");
- break;
- }
- } // switch (BlockEntity->GetBlockType())
- if (SaveInto == NULL)
- {
- return;
- }
-
- Json::Value val;
- a_BlockEntity->SaveToJson(val);
- m_Root[SaveInto].append(val);
- m_HasJsonData = true;
-}
-
-
-
-
-
-bool cJsonChunkSerializer::LightIsValid(bool a_IsLightValid)
-{
- if (!a_IsLightValid)
- {
- return false;
- }
- m_Root["IsLightValid"] = true;
- m_HasJsonData = true;
- return true;
-}
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cWSSCompact:
-
-cWSSCompact::~cWSSCompact()
-{
- for (cPAKFiles::iterator itr = m_PAKFiles.begin(); itr != m_PAKFiles.end(); ++itr)
- {
- delete *itr;
- }
-}
-
-
-
-
-
-bool cWSSCompact::LoadChunk(const cChunkCoords & a_Chunk)
-{
- AString ChunkData;
- int UncompressedSize = 0;
- if (!GetChunkData(a_Chunk, UncompressedSize, ChunkData))
- {
- // The reason for failure is already printed in GetChunkData()
- return false;
- }
-
- return LoadChunkFromData(a_Chunk, UncompressedSize, ChunkData, m_World);
-}
-
-
-
-
-
-bool cWSSCompact::SaveChunk(const cChunkCoords & a_Chunk)
-{
- cCSLock Lock(m_CS);
-
- cPAKFile * f = LoadPAKFile(a_Chunk);
- if (f == NULL)
- {
- // For some reason we couldn't locate the file
- LOG("Cannot locate a proper PAK file for chunk [%d, %d]", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
- return false;
- }
- return f->SaveChunk(a_Chunk, m_World);
-}
-
-
-
-
-
-cWSSCompact::cPAKFile * cWSSCompact::LoadPAKFile(const cChunkCoords & a_Chunk)
-{
- // ASSUMES that m_CS has been locked
-
- // We need to retain this weird conversion code, because some edge chunks are in the wrong PAK file
- const int LayerX = FAST_FLOOR_DIV(a_Chunk.m_ChunkX, 32);
- const int LayerZ = FAST_FLOOR_DIV(a_Chunk.m_ChunkZ, 32);
-
- // Is it already cached?
- for (cPAKFiles::iterator itr = m_PAKFiles.begin(); itr != m_PAKFiles.end(); ++itr)
- {
- if (((*itr) != NULL) && ((*itr)->GetLayerX() == LayerX) && ((*itr)->GetLayerZ() == LayerZ))
- {
- // Move the file to front and return it:
- cPAKFile * f = *itr;
- if (itr != m_PAKFiles.begin())
- {
- m_PAKFiles.erase(itr);
- m_PAKFiles.push_front(f);
- }
- return f;
- }
- }
-
- // Load it anew:
- AString FileName;
- Printf(FileName, "%s/X%i_Z%i.pak", m_World->GetName().c_str(), LayerX, LayerZ );
- cPAKFile * f = new cPAKFile(FileName, LayerX, LayerZ);
- if (f == NULL)
- {
- return NULL;
- }
- m_PAKFiles.push_front(f);
-
- // If there are too many PAK files cached, delete the last one used:
- if (m_PAKFiles.size() > MAX_PAK_FILES)
- {
- delete m_PAKFiles.back();
- m_PAKFiles.pop_back();
- }
- return f;
-}
-
-
-
-
-
-bool cWSSCompact::GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data)
-{
- cCSLock Lock(m_CS);
- cPAKFile * f = LoadPAKFile(a_Chunk);
- if (f == NULL)
- {
- return false;
- }
- return f->GetChunkData(a_Chunk, a_UncompressedSize, a_Data);
-}
-
-
-
-
-
-/*
-// TODO: Rewrite saving to use the same principles as loading
-bool cWSSCompact::SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data)
-{
- cCSLock Lock(m_CS);
- cPAKFile * f = LoadPAKFile(a_Chunk);
- if (f == NULL)
- {
- return false;
- }
- return f->SetChunkData(a_Chunk, a_UncompressedSize, a_Data);
-}
-*/
-
-
-
-
-
-bool cWSSCompact::EraseChunkData(const cChunkCoords & a_Chunk)
-{
- cCSLock Lock(m_CS);
- cPAKFile * f = LoadPAKFile(a_Chunk);
- if (f == NULL)
- {
- return false;
- }
- return f->EraseChunkData(a_Chunk);
-}
-
-
-
-
-
-void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_Entities, cBlockEntityList & a_BlockEntities, cWorld * a_World)
-{
- // Load chests
- Json::Value AllChests = a_Value.get("Chests", Json::nullValue);
- if (!AllChests.empty())
- {
- for (Json::Value::iterator itr = AllChests.begin(); itr != AllChests.end(); ++itr )
- {
- Json::Value & Chest = *itr;
- cChestEntity * ChestEntity = new cChestEntity(0,0,0, a_World);
- if (!ChestEntity->LoadFromJson( Chest ) )
- {
- LOGERROR("ERROR READING CHEST FROM JSON!" );
- delete ChestEntity;
- }
- else
- {
- a_BlockEntities.push_back( ChestEntity );
- }
- } // for itr - AllChests[]
- }
-
- // Load dispensers
- Json::Value AllDispensers = a_Value.get("Dispensers", Json::nullValue);
- if( !AllDispensers.empty() )
- {
- for( Json::Value::iterator itr = AllDispensers.begin(); itr != AllDispensers.end(); ++itr )
- {
- Json::Value & Dispenser = *itr;
- cDispenserEntity * DispenserEntity = new cDispenserEntity(0,0,0, a_World);
- if( !DispenserEntity->LoadFromJson( Dispenser ) )
- {
- LOGERROR("ERROR READING DISPENSER FROM JSON!" );
- delete DispenserEntity;
- }
- else
- {
- a_BlockEntities.push_back( DispenserEntity );
- }
- } // for itr - AllDispensers[]
- }
-
- // Load furnaces
- Json::Value AllFurnaces = a_Value.get("Furnaces", Json::nullValue);
- if( !AllFurnaces.empty() )
- {
- for( Json::Value::iterator itr = AllFurnaces.begin(); itr != AllFurnaces.end(); ++itr )
- {
- Json::Value & Furnace = *itr;
- // TODO: The block type and meta aren't correct, there's no way to get them here
- cFurnaceEntity * FurnaceEntity = new cFurnaceEntity(0, 0, 0, E_BLOCK_FURNACE, 0, a_World);
- if (!FurnaceEntity->LoadFromJson(Furnace))
- {
- LOGERROR("ERROR READING FURNACE FROM JSON!" );
- delete FurnaceEntity;
- }
- else
- {
- a_BlockEntities.push_back(FurnaceEntity);
- }
- } // for itr - AllFurnaces[]
- }
-
- // Load signs
- Json::Value AllSigns = a_Value.get("Signs", Json::nullValue);
- if( !AllSigns.empty() )
- {
- for( Json::Value::iterator itr = AllSigns.begin(); itr != AllSigns.end(); ++itr )
- {
- Json::Value & Sign = *itr;
- cSignEntity * SignEntity = new cSignEntity( E_BLOCK_SIGN_POST, 0,0,0, a_World);
- if ( !SignEntity->LoadFromJson( Sign ) )
- {
- LOGERROR("ERROR READING SIGN FROM JSON!" );
- delete SignEntity;
- }
- else
- {
- a_BlockEntities.push_back( SignEntity );
- }
- } // for itr - AllSigns[]
- }
-
- // Load note blocks
- Json::Value AllNotes = a_Value.get("Notes", Json::nullValue);
- if( !AllNotes.empty() )
- {
- for( Json::Value::iterator itr = AllNotes.begin(); itr != AllNotes.end(); ++itr )
- {
- Json::Value & Note = *itr;
- cNoteEntity * NoteEntity = new cNoteEntity(0, 0, 0, a_World);
- if ( !NoteEntity->LoadFromJson( Note ) )
- {
- LOGERROR("ERROR READING NOTE BLOCK FROM JSON!" );
- delete NoteEntity;
- }
- else
- {
- a_BlockEntities.push_back( NoteEntity );
- }
- } // for itr - AllNotes[]
- }
-
- // Load jukeboxes
- Json::Value AllJukeboxes = a_Value.get("Jukeboxes", Json::nullValue);
- if( !AllJukeboxes.empty() )
- {
- for( Json::Value::iterator itr = AllJukeboxes.begin(); itr != AllJukeboxes.end(); ++itr )
- {
- Json::Value & Jukebox = *itr;
- cJukeboxEntity * JukeboxEntity = new cJukeboxEntity(0, 0, 0, a_World);
- if ( !JukeboxEntity->LoadFromJson( Jukebox ) )
- {
- LOGERROR("ERROR READING JUKEBOX FROM JSON!" );
- delete JukeboxEntity;
- }
- else
- {
- a_BlockEntities.push_back( JukeboxEntity );
- }
- } // for itr - AllJukeboxes[]
- }
-}
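
The JSON that trails a chunk's block data groups block entities under per-type arrays ("Chests", "Dispensers", "Furnaces", "Signs", "Notes", "Jukeboxes") next to the "IsLightValid" flag written by cJsonChunkSerializer; note that the serializer also emits a "Droppers" array which this loader never reads back. A minimal sketch of walking that structure with the same jsoncpp calls used above, kept purely illustrative - the per-entity fields are left opaque because they are defined by each entity's LoadFromJson():

```cpp
#include <string>
#include <json/json.h>

// Counts the block entities stored in a chunk's trailing JSON, per the groups
// that cWSSCompact::LoadEntitiesFromJson() understands. Returns -1 on a parse error.
static int CountJsonBlockEntities(const std::string & a_TrailingJson, bool & a_IsLightValid)
{
	Json::Value Root;
	Json::Reader Reader;
	a_IsLightValid = false;
	if (!Reader.parse(a_TrailingJson, Root, false))
	{
		return -1;
	}
	a_IsLightValid = Root.get("IsLightValid", false).asBool();

	static const char * Groups[] = { "Chests", "Dispensers", "Furnaces", "Signs", "Notes", "Jukeboxes" };
	int Count = 0;
	for (size_t i = 0; i < sizeof(Groups) / sizeof(Groups[0]); i++)
	{
		Count += static_cast<int>(Root.get(Groups[i], Json::nullValue).size());
	}
	return Count;
}
```
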
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cWSSCompact::cPAKFile
-
-#define READ(Var) \
- if (f.Read(&Var, sizeof(Var)) != sizeof(Var)) \
- { \
- LOGERROR("ERROR READING %s FROM FILE %s (line %d); file offset %d", #Var, m_FileName.c_str(), __LINE__, f.Tell()); \
- return; \
- }
-
-cWSSCompact::cPAKFile::cPAKFile(const AString & a_FileName, int a_LayerX, int a_LayerZ) :
- m_FileName(a_FileName),
- m_LayerX(a_LayerX),
- m_LayerZ(a_LayerZ),
- m_NumDirty(0),
- m_ChunkVersion( CHUNK_VERSION ), // Init with latest version
- m_PakVersion( PAK_VERSION )
-{
- cFile f;
- if (!f.Open(m_FileName, cFile::fmRead))
- {
- return;
- }
-
- // Read headers:
- READ(m_PakVersion);
- if (m_PakVersion != 1)
- {
- LOGERROR("File \"%s\" is in an unknown pak format (%d)", m_FileName.c_str(), m_PakVersion);
- return;
- }
-
- READ(m_ChunkVersion);
- switch( m_ChunkVersion )
- {
- case 1:
- m_ChunkSize.Set(16, 128, 16);
- break;
- case 2:
- case 3:
- m_ChunkSize.Set(16, 256, 16);
- break;
- default:
- LOGERROR("File \"%s\" is in an unknown chunk format (%d)", m_FileName.c_str(), m_ChunkVersion);
- return;
- };
-
- short NumChunks = 0;
- READ(NumChunks);
-
- // Read chunk headers:
- for (int i = 0; i < NumChunks; i++)
- {
- sChunkHeader * Header = new sChunkHeader;
- READ(*Header);
- m_ChunkHeaders.push_back(Header);
- } // for i - chunk headers
-
- // Read chunk data:
- if (f.ReadRestOfFile(m_DataContents) == -1)
- {
- LOGERROR("Cannot read file \"%s\" contents", m_FileName.c_str());
- return;
- }
-
- if( m_ChunkVersion == 1 ) // Convert chunks to version 2
- {
- UpdateChunk1To2();
- }
-#if AXIS_ORDER == AXIS_ORDER_XZY
- if( m_ChunkVersion == 2 ) // Convert chunks to version 3
- {
- UpdateChunk2To3();
- }
-#endif
-}
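
As the constructor above reads it, a PAK file is a flat sequence: a 1-byte pak version, a 1-byte chunk version, a 2-byte chunk count, then that many packed sChunkHeader records (four 4-byte ints each), and finally every chunk's zlib-compressed blob back to back in header order - a chunk's offset is the running sum of the preceding headers' m_CompressedSize, as cPAKFile::GetChunkData() computes. A standalone sketch of reading just the fixed-size part; the struct mirrors sChunkHeader and the names are illustrative. Byte order and padding are assumed to match the machine that wrote the file, as in the original code:

```cpp
#include <cstdint>
#include <fstream>
#include <vector>

// Mirror of cWSSCompact::sChunkHeader: four packed 32-bit ints, native byte order.
struct sPakChunkHeader
{
	int32_t m_ChunkX;
	int32_t m_ChunkZ;
	int32_t m_CompressedSize;
	int32_t m_UncompressedSize;
};

// Reads the PAK header block; the compressed chunk blobs follow immediately after it.
// Returns false on a short read or a negative chunk count.
static bool ReadPakHeaders(const char * a_FileName, char & a_PakVersion, char & a_ChunkVersion, std::vector<sPakChunkHeader> & a_Headers)
{
	std::ifstream f(a_FileName, std::ios::binary);
	if (!f.read(&a_PakVersion, 1) || !f.read(&a_ChunkVersion, 1))
	{
		return false;
	}
	int16_t NumChunks = 0;
	if (!f.read(reinterpret_cast<char *>(&NumChunks), sizeof(NumChunks)) || (NumChunks < 0))
	{
		return false;
	}
	a_Headers.resize(static_cast<size_t>(NumChunks));
	for (int i = 0; i < NumChunks; i++)
	{
		if (!f.read(reinterpret_cast<char *>(&a_Headers[i]), sizeof(sPakChunkHeader)))
		{
			return false;
		}
	}
	return true;
}
```
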
-
-
-
-
-
-cWSSCompact::cPAKFile::~cPAKFile()
-{
- if (m_NumDirty > 0)
- {
- SynchronizeFile();
- }
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- delete *itr;
- }
-}
-
-
-
-
-
-bool cWSSCompact::cPAKFile::GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data)
-{
- int ChunkX = a_Chunk.m_ChunkX;
- int ChunkZ = a_Chunk.m_ChunkZ;
- sChunkHeader * Header = NULL;
- int Offset = 0;
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- if (((*itr)->m_ChunkX == ChunkX) && ((*itr)->m_ChunkZ == ChunkZ))
- {
- Header = *itr;
- break;
- }
- Offset += (*itr)->m_CompressedSize;
- }
- if ((Header == NULL) || (Offset + Header->m_CompressedSize > (int)m_DataContents.size()))
- {
- // Chunk not found / data invalid
- return false;
- }
-
- a_UncompressedSize = Header->m_UncompressedSize;
- a_Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
- return true;
-}
-
-
-
-
-
-bool cWSSCompact::cPAKFile::SaveChunk(const cChunkCoords & a_Chunk, cWorld * a_World)
-{
- if (!SaveChunkToData(a_Chunk, a_World))
- {
- return false;
- }
- if (m_NumDirty > MAX_DIRTY_CHUNKS)
- {
- SynchronizeFile();
- }
- return true;
-}
-
-
-
-
-
-void cWSSCompact::cPAKFile::UpdateChunk1To2()
-{
- int Offset = 0;
- AString NewDataContents;
- int ChunksConverted = 0;
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- sChunkHeader * Header = *itr;
-
- if( ChunksConverted % 32 == 0 )
- {
- LOGINFO("Updating \"%s\" version 1 to version 2: %d %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size() );
- }
- ChunksConverted++;
-
- AString Data;
- int UncompressedSize = Header->m_UncompressedSize;
- Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
- Offset += Header->m_CompressedSize;
-
- // Crude data integrity check:
- int ExpectedSize = (16*128*16)*2 + (16*128*16)/2; // For version 1
- if (UncompressedSize < ExpectedSize)
- {
- LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing",
- Header->m_ChunkX, Header->m_ChunkZ,
- UncompressedSize, ExpectedSize
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
-
- // Decompress the data:
- AString UncompressedData;
- {
- int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, UncompressedSize);
- if (errorcode != Z_OK)
- {
- LOGERROR("Error %d decompressing data for chunk [%d, %d]",
- errorcode,
- Header->m_ChunkX, Header->m_ChunkZ
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
- }
-
- if (UncompressedSize != (int)UncompressedData.size())
- {
- LOGWARNING("Uncompressed data size differs (exp %d bytes, got %d) for chunk [%d, %d]",
- UncompressedSize, UncompressedData.size(),
- Header->m_ChunkX, Header->m_ChunkZ
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
-
-
- // Old version is 128 blocks high with YZX axis order
- char ConvertedData[cChunkDef::BlockDataSize];
- int Index = 0;
- unsigned int InChunkOffset = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z )
- {
- for( int y = 0; y < 128; ++y )
- {
- ConvertedData[Index++] = UncompressedData[y + z * 128 + x * 128 * 16 + InChunkOffset];
- }
- // Add 128 empty blocks after an old y column
- memset(ConvertedData + Index, E_BLOCK_AIR, 128);
- Index += 128;
- }
- InChunkOffset += (16 * 128 * 16);
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Metadata
- {
- for( int y = 0; y < 64; ++y )
- {
- ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
- }
- memset(ConvertedData + Index, 0, 64);
- Index += 64;
- }
- InChunkOffset += (16 * 128 * 16) / 2;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Block light
- {
- for( int y = 0; y < 64; ++y )
- {
- ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
- }
- memset(ConvertedData + Index, 0, 64);
- Index += 64;
- }
- InChunkOffset += (16*128*16)/2;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Sky light
- {
- for( int y = 0; y < 64; ++y )
- {
- ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
- }
- memset(ConvertedData + Index, 0, 64);
- Index += 64;
- }
- InChunkOffset += (16 * 128 * 16) / 2;
-
- AString Converted(ConvertedData, ARRAYCOUNT(ConvertedData));
-
- // Add JSON data afterwards
- if (UncompressedData.size() > InChunkOffset)
- {
- Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end() );
- }
-
- // Re-compress data
- AString CompressedData;
- {
- int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData);
- if (errorcode != Z_OK)
- {
- LOGERROR("Error %d compressing data for chunk [%d, %d]",
- errorcode,
- Header->m_ChunkX, Header->m_ChunkZ
- );
- continue;
- }
- }
-
- // Save into file's cache
- Header->m_UncompressedSize = Converted.size();
- Header->m_CompressedSize = CompressedData.size();
- NewDataContents.append( CompressedData );
- }
-
- // Done converting
- m_DataContents = NewDataContents;
- m_ChunkVersion = 2;
- SynchronizeFile();
-
- LOGINFO("Updated \"%s\" version 1 to version 2", m_FileName.c_str() );
-}
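
The version-1 to version-2 conversion above keeps the YZX axis order but doubles the world height: every old 128-entry Y column is copied and then padded with 128 air blocks (64 zero bytes for each nibble array). A standalone sketch of the block-type part only, assuming E_BLOCK_AIR is 0; the function name is illustrative:

```cpp
#include <cstring>

// Expands a version-1 block-type array (16 x 128 x 16, YZX order) into the
// version-2 layout (16 x 256 x 16, still YZX): each old 128-byte Y column is
// followed by 128 air blocks, exactly as cPAKFile::UpdateChunk1To2() does.
// a_Src must hold 16 * 128 * 16 bytes, a_Dst 16 * 256 * 16 bytes; no overlap.
static void ExpandBlockTypes1To2(const char * a_Src, char * a_Dst)
{
	const char AirBlock = 0;  // E_BLOCK_AIR in the vanilla block IDs
	int OutIndex = 0;
	for (int x = 0; x < 16; ++x)
	{
		for (int z = 0; z < 16; ++z)
		{
			// Copy the old column (contiguous in the YZX source layout):
			std::memcpy(a_Dst + OutIndex, a_Src + (z * 128) + (x * 128 * 16), 128);
			OutIndex += 128;
			// Pad the new upper half of the column with air:
			std::memset(a_Dst + OutIndex, AirBlock, 128);
			OutIndex += 128;
		}
	}
}
```
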
-
-
-
-
-
-void cWSSCompact::cPAKFile::UpdateChunk2To3()
-{
- int Offset = 0;
- AString NewDataContents;
- int ChunksConverted = 0;
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- sChunkHeader * Header = *itr;
-
- if( ChunksConverted % 32 == 0 )
- {
- LOGINFO("Updating \"%s\" version 2 to version 3: %d %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size() );
- }
- ChunksConverted++;
-
- AString Data;
- int UncompressedSize = Header->m_UncompressedSize;
- Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
- Offset += Header->m_CompressedSize;
-
- // Crude data integrity check:
- const int ExpectedSize = (16*256*16)*2 + (16*256*16)/2; // For version 2
- if (UncompressedSize < ExpectedSize)
- {
- LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing",
- Header->m_ChunkX, Header->m_ChunkZ,
- UncompressedSize, ExpectedSize
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
-
- // Decompress the data:
- AString UncompressedData;
- {
- int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, UncompressedSize);
- if (errorcode != Z_OK)
- {
- LOGERROR("Error %d decompressing data for chunk [%d, %d]",
- errorcode,
- Header->m_ChunkX, Header->m_ChunkZ
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
- }
-
- if (UncompressedSize != (int)UncompressedData.size())
- {
- LOGWARNING("Uncompressed data size differs (exp %d bytes, got %d) for chunk [%d, %d]",
- UncompressedSize, UncompressedData.size(),
- Header->m_ChunkX, Header->m_ChunkZ
- );
- // (Offset was already advanced past this chunk's data above)
- continue;
- }
-
- char ConvertedData[ExpectedSize];
- memset(ConvertedData, 0, ExpectedSize);
-
- // Cannot use cChunk::MakeIndex() here, because its index order might change yet again
- // For compatibility, use what we know is current
- #define MAKE_2_INDEX( x, y, z ) ( y + (z * 256) + (x * 256 * 16) )
- #define MAKE_3_INDEX( x, y, z ) ( x + (z * 16) + (y * 16 * 16) )
-
- unsigned int InChunkOffset = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y ) // YZX Loop order is important, in 1.1 Y was first then Z then X
- {
- ConvertedData[ MAKE_3_INDEX(x, y, z) ] = UncompressedData[InChunkOffset];
- ++InChunkOffset;
- } // for y, z, x
-
-
- unsigned int index2 = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
- {
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
- ++index2;
- }
- InChunkOffset += index2 / 2;
- index2 = 0;
-
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
- {
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
- ++index2;
- }
- InChunkOffset += index2 / 2;
- index2 = 0;
-
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
- {
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
- ++index2;
- }
- InChunkOffset += index2 / 2;
- index2 = 0;
-
- AString Converted(ConvertedData, ExpectedSize);
-
- // Add JSON data afterwards
- if (UncompressedData.size() > InChunkOffset)
- {
- Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end() );
- }
-
- // Re-compress data
- AString CompressedData;
- {
- int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData);
- if (errorcode != Z_OK)
- {
- LOGERROR("Error %d compressing data for chunk [%d, %d]",
- errorcode,
- Header->m_ChunkX, Header->m_ChunkZ
- );
- continue;
- }
- }
-
- // Save into file's cache
- Header->m_UncompressedSize = Converted.size();
- Header->m_CompressedSize = CompressedData.size();
- NewDataContents.append( CompressedData );
- }
-
- // Done converting
- m_DataContents = NewDataContents;
- m_ChunkVersion = 3;
- SynchronizeFile();
-
- LOGINFO("Updated \"%s\" version 2 to version 3", m_FileName.c_str() );
-}
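
Stripped of the nibble bookkeeping, the conversion above is a remap of every block index from the version-2 order (Y fastest: y + z*256 + x*256*16) to the version-3 order (X fastest: x + z*16 + y*16*16), exactly as the MAKE_2_INDEX / MAKE_3_INDEX macros spell out. A standalone sketch of that remap for the block-type bytes only; the names are illustrative:

```cpp
#include <cstddef>

// Version-2 ("YZX") and version-3 ("XZY") linear indices for a 16 x 256 x 16 chunk:
static inline size_t Index2(int x, int y, int z) { return static_cast<size_t>(y + (z * 256) + (x * 256 * 16)); }
static inline size_t Index3(int x, int y, int z) { return static_cast<size_t>(x + (z * 16)  + (y * 16 * 16)); }

// Reorders the 16 * 256 * 16 block-type bytes from version-2 to version-3 layout.
// a_Src and a_Dst must each point at 16 * 256 * 16 bytes and must not overlap.
static void RemapBlockTypes2To3(const char * a_Src, char * a_Dst)
{
	for (int x = 0; x < 16; ++x)
	{
		for (int z = 0; z < 16; ++z)
		{
			for (int y = 0; y < 256; ++y)
			{
				a_Dst[Index3(x, y, z)] = a_Src[Index2(x, y, z)];
			}
		}
	}
}
```
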
-
-
-
-
-
-bool cWSSCompact::LoadChunkFromData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, const AString & a_Data, cWorld * a_World)
-{
- // Crude data integrity check:
- if (a_UncompressedSize < cChunkDef::BlockDataSize)
- {
- LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing",
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ,
- a_UncompressedSize, cChunkDef::BlockDataSize
- );
- EraseChunkData(a_Chunk);
- return false;
- }
-
- // Decompress the data:
- AString UncompressedData;
- int errorcode = UncompressString(a_Data.data(), a_Data.size(), UncompressedData, a_UncompressedSize);
- if (errorcode != Z_OK)
- {
- LOGERROR("Error %d decompressing data for chunk [%d, %d]",
- errorcode,
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ
- );
- return false;
- }
-
- if (a_UncompressedSize != (int)UncompressedData.size())
- {
- LOGWARNING("Uncompressed data size differs (exp %d bytes, got %d) for chunk [%d, %d]",
- a_UncompressedSize, UncompressedData.size(),
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ
- );
- return false;
- }
-
- cEntityList Entities;
- cBlockEntityList BlockEntities;
- bool IsLightValid = false;
-
- if (a_UncompressedSize > cChunkDef::BlockDataSize)
- {
- Json::Value root; // will contain the root value after parsing.
- Json::Reader reader;
- if ( !reader.parse( UncompressedData.data() + cChunkDef::BlockDataSize, root, false ) )
- {
- LOGERROR("Failed to parse trailing JSON in chunk [%d, %d]!",
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ
- );
- }
- else
- {
- LoadEntitiesFromJson(root, Entities, BlockEntities, a_World);
- IsLightValid = root.get("IsLightValid", false).asBool();
- }
- }
-
- BLOCKTYPE * BlockData = (BLOCKTYPE *)UncompressedData.data();
- NIBBLETYPE * MetaData = (NIBBLETYPE *)(BlockData + cChunkDef::MetaOffset);
- NIBBLETYPE * BlockLight = (NIBBLETYPE *)(BlockData + cChunkDef::LightOffset);
- NIBBLETYPE * SkyLight = (NIBBLETYPE *)(BlockData + cChunkDef::SkyLightOffset);
-
- a_World->SetChunkData(
- a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ,
- BlockData, MetaData,
- IsLightValid ? BlockLight : NULL,
- IsLightValid ? SkyLight : NULL,
- NULL, NULL,
- Entities, BlockEntities,
- false
- );
-
- return true;
-}
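
The pointer arithmetic above relies on the fixed layout of the decompressed blob: 16 x 256 x 16 block-type bytes first, then three half-size nibble arrays (metas, block light, sky light), with any trailing JSON starting at cChunkDef::BlockDataSize. A self-contained sketch of those offsets with the constants spelled out rather than taken from cChunkDef, so treat the names as illustrative:

```cpp
#include <cstddef>

// Layout of a decompressed version-3 chunk blob, as consumed by
// cWSSCompact::LoadChunkFromData(). All sizes are in bytes.
const size_t NumBlocks       = 16 * 256 * 16;                     // 65536 block-type bytes
const size_t NibbleArraySize = NumBlocks / 2;                     // 4 bits per block
const size_t MetaOffset      = NumBlocks;                         // Block metas
const size_t LightOffset     = MetaOffset  + NibbleArraySize;     // Block light
const size_t SkyLightOffset  = LightOffset + NibbleArraySize;     // Sky light
const size_t BlockDataSize   = SkyLightOffset + NibbleArraySize;  // 2.5 * NumBlocks; trailing JSON (if any) starts here

// Splits a decompressed chunk blob into its four sections.
// a_Blob must hold at least BlockDataSize bytes (the integrity check above guarantees this).
static void SliceChunkBlob(
	const char * a_Blob,
	const char * & a_BlockTypes, const char * & a_Metas,
	const char * & a_BlockLight, const char * & a_SkyLight
)
{
	a_BlockTypes = a_Blob;
	a_Metas      = a_Blob + MetaOffset;
	a_BlockLight = a_Blob + LightOffset;
	a_SkyLight   = a_Blob + SkyLightOffset;
}
```
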
-
-
-
-
-
-bool cWSSCompact::cPAKFile::EraseChunkData(const cChunkCoords & a_Chunk)
-{
- int ChunkX = a_Chunk.m_ChunkX;
- int ChunkZ = a_Chunk.m_ChunkZ;
- int Offset = 0;
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- if (((*itr)->m_ChunkX == ChunkX) && ((*itr)->m_ChunkZ == ChunkZ))
- {
- m_DataContents.erase(Offset, (*itr)->m_CompressedSize);
- delete *itr;
- itr = m_ChunkHeaders.erase(itr);
- return true;
- }
- Offset += (*itr)->m_CompressedSize;
- }
-
- return false;
-}
-
-
-
-
-
-bool cWSSCompact::cPAKFile::SaveChunkToData(const cChunkCoords & a_Chunk, cWorld * a_World)
-{
- // Serialize the chunk:
- cJsonChunkSerializer Serializer;
- if (!a_World->GetChunkData(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, Serializer))
- {
- // Chunk not valid
- LOG("cWSSCompact: Trying to save chunk [%d, %d, %d] that has no data, ignoring request.", a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
- return false;
- }
-
- AString Data;
- Data.assign((const char *)Serializer.GetBlockData(), cChunkDef::BlockDataSize);
- if (Serializer.HasJsonData())
- {
- AString JsonData;
- Json::StyledWriter writer;
- JsonData = writer.write(Serializer.GetRoot());
- Data.append(JsonData);
- }
-
- // Compress the data:
- AString CompressedData;
- int errorcode = CompressString(Data.data(), Data.size(), CompressedData);
- if ( errorcode != Z_OK )
- {
- LOGERROR("Error %i compressing data for chunk [%d, %d, %d]", errorcode, a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
- return false;
- }
-
- // Erase any existing data for the chunk:
- EraseChunkData(a_Chunk);
-
- // Save the header:
- sChunkHeader * Header = new sChunkHeader;
- if (Header == NULL)
- {
- LOGWARNING("Cannot create a new chunk header to save chunk [%d, %d, %d]", a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
- return false;
- }
- Header->m_CompressedSize = (int)CompressedData.size();
- Header->m_ChunkX = a_Chunk.m_ChunkX;
- Header->m_ChunkZ = a_Chunk.m_ChunkZ;
- Header->m_UncompressedSize = (int)Data.size();
- m_ChunkHeaders.push_back(Header);
-
- m_DataContents.append(CompressedData.data(), CompressedData.size());
-
- m_NumDirty++;
- return true;
-}
-
-
-
-
-
-#define WRITE(Var) \
- if (f.Write(&Var, sizeof(Var)) != sizeof(Var)) \
- { \
- LOGERROR("cWSSCompact: ERROR writing %s to file \"%s\" (line %d); file offset %d", #Var, m_FileName.c_str(), __LINE__, f.Tell()); \
- return; \
- }
-
-void cWSSCompact::cPAKFile::SynchronizeFile(void)
-{
- cFile f;
- if (!f.Open(m_FileName, cFile::fmWrite))
- {
- LOGERROR("Cannot open PAK file \"%s\" for writing", m_FileName.c_str());
- return;
- }
-
- WRITE(m_PakVersion);
- WRITE(m_ChunkVersion);
- short NumChunks = (short)m_ChunkHeaders.size();
- WRITE(NumChunks);
- for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
- {
- WRITE(**itr);
- }
- if (f.Write(m_DataContents.data(), m_DataContents.size()) != (int)m_DataContents.size())
- {
- LOGERROR("cWSSCompact: ERROR writing chunk contents to file \"%s\" (line %d); file offset %d", m_FileName.c_str(), __LINE__, f.Tell());
- return;
- }
- m_NumDirty = 0;
-}
-
-
-
-
diff --git a/source/WorldStorage/WSSCompact.h b/source/WorldStorage/WSSCompact.h
deleted file mode 100644
index e6a013eaf..000000000
--- a/source/WorldStorage/WSSCompact.h
+++ /dev/null
@@ -1,144 +0,0 @@
-
-// WSSCompact.h
-
-// Interfaces to the cWSSCompact class representing the "Compact" storage schema (PAK-files)
-
-
-
-
-
-#pragma once
-#ifndef WSSCOMPACT_H_INCLUDED
-#define WSSCOMPACT_H_INCLUDED
-
-#include "WorldStorage.h"
-#include "../Vector3i.h"
-
-
-
-
-
-/// Helper class for serializing a chunk into Json
-class cJsonChunkSerializer :
- public cChunkDataCollector
-{
-public:
-
- cJsonChunkSerializer(void);
-
- Json::Value & GetRoot (void) {return m_Root; }
- BLOCKTYPE * GetBlockData(void) {return (BLOCKTYPE *)m_BlockData; }
- bool HasJsonData (void) const {return m_HasJsonData; }
-
-protected:
-
- // NOTE: block data is serialized into inherited cChunkDataCollector's m_BlockData[] array
-
- // Entities and BlockEntities are serialized to Json
- Json::Value m_Root;
- bool m_HasJsonData;
-
- // cChunkDataCollector overrides:
- virtual void Entity (cEntity * a_Entity) override;
- virtual void BlockEntity (cBlockEntity * a_Entity) override;
- virtual bool LightIsValid (bool a_IsLightValid) override;
-} ;
-
-
-
-
-
-class cWSSCompact :
- public cWSSchema
-{
-public:
- cWSSCompact(cWorld * a_World) : cWSSchema(a_World) {}
- virtual ~cWSSCompact();
-
-protected:
-
- struct sChunkHeader;
- typedef std::vector<sChunkHeader *> sChunkHeaders;
-
- /// Implements a cache for a single PAK file; implements lazy-write in order to be able to write multiple chunks fast
- class cPAKFile
- {
- public:
-
- cPAKFile(const AString & a_FileName, int a_LayerX, int a_LayerZ);
- ~cPAKFile();
-
- bool GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data);
- bool SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data);
- bool EraseChunkData(const cChunkCoords & a_Chunk);
-
- bool SaveChunk(const cChunkCoords & a_Chunk, cWorld * a_World);
-
- int GetLayerX(void) const {return m_LayerX; }
- int GetLayerZ(void) const {return m_LayerZ; }
-
- static const int PAK_VERSION = 1;
-#if AXIS_ORDER == AXIS_ORDER_XZY
- static const int CHUNK_VERSION = 3;
-#elif AXIS_ORDER == AXIS_ORDER_YZX
- static const int CHUNK_VERSION = 2;
-#endif
- protected:
-
- AString m_FileName;
- int m_LayerX;
- int m_LayerZ;
-
- sChunkHeaders m_ChunkHeaders;
- AString m_DataContents; // Data contents of the file, cached
-
- int m_NumDirty; // Number of chunks that were written into m_DataContents but not into the file
-
- Vector3i m_ChunkSize; // Chunk dimensions; depends on m_ChunkVersion (chunk height changed from 128 to 256 in version 2)
- char m_ChunkVersion;
- char m_PakVersion;
-
- bool SaveChunkToData(const cChunkCoords & a_Chunk, cWorld * a_World); // Saves the chunk to m_DataContents, updates headers and m_NumDirty
- void SynchronizeFile(void); // Writes m_DataContents along with the headers to file, resets m_NumDirty
-
- void UpdateChunk1To2(void); // Height from 128 to 256
- void UpdateChunk2To3(void); // Axis order from YZX to XZY
- } ;
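-
- // Lazy-write flow, pieced together from the members above: SaveChunkToData() compresses a chunk
- // into m_DataContents and increments m_NumDirty; the file on disk is rewritten only when
- // SynchronizeFile() runs (presumably once enough dirty chunks have accumulated), which resets m_NumDirty.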
-
- typedef std::list<cPAKFile *> cPAKFiles;
-
- cCriticalSection m_CS;
- cPAKFiles m_PAKFiles; // An MRU cache of PAK files
-
- /// Loads the correct PAK file either from cache or from disk, manages the m_PAKFiles cache
- cPAKFile * LoadPAKFile(const cChunkCoords & a_Chunk);
-
- /// Gets chunk data from the correct file; locks CS as needed
- bool GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data);
-
- /// Sets chunk data to the correct file; locks CS as needed
- bool SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data);
-
- /// Erases chunk data from the correct file; locks CS as needed
- bool EraseChunkData(const cChunkCoords & a_Chunk);
-
- /// Loads the chunk from the data (no locking needed)
- bool LoadChunkFromData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, const AString & a_Data, cWorld * a_World);
-
- void LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_Entities, cBlockEntityList & a_BlockEntities, cWorld * a_World);
-
- // cWSSchema overrides:
- virtual bool LoadChunk(const cChunkCoords & a_Chunk) override;
- virtual bool SaveChunk(const cChunkCoords & a_Chunk) override;
- virtual const AString GetName(void) const override {return "compact"; }
-} ;
-
-
-
-
-
-#endif // WSSCOMPACT_H_INCLUDED
-
-
-
-
diff --git a/source/WorldStorage/WorldStorage.cpp b/source/WorldStorage/WorldStorage.cpp
deleted file mode 100644
index f290ec128..000000000
--- a/source/WorldStorage/WorldStorage.cpp
+++ /dev/null
@@ -1,409 +0,0 @@
-
-// WorldStorage.cpp
-
-// Implements the cWorldStorage class representing the chunk loading / saving thread
-
-// To add a new storage schema, implement a cWSSchema descendant and add it to cWorldStorage::InitSchemas()
-
-#include "Globals.h"
-#include "WorldStorage.h"
-#include "WSSCompact.h"
-#include "WSSAnvil.h"
-#include "../World.h"
-#include "../Generating/ChunkGenerator.h"
-#include "../Entities/Entity.h"
-#include "../BlockEntities/BlockEntity.h"
-
-
-
-
-
-/// If a chunk with this Y coord is dequeued from the save queue, it signals that the saved-all message should be emitted (cWorldStorage::QueueSavedMessage())
-#define CHUNK_Y_MESSAGE 2
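-// (QueueSavedMessage() below pushes cChunkCoords(0, CHUNK_Y_MESSAGE, 0) into the save queue;
-// SaveOneChunk() then recognizes the sentinel and logs the saved-all message instead of saving.)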
-
-
-
-
-
-/// Example storage schema - forgets all chunks ;)
-class cWSSForgetful :
- public cWSSchema
-{
-public:
- cWSSForgetful(cWorld * a_World) : cWSSchema(a_World) {}
-
-protected:
- // cWSSchema overrides:
- virtual bool LoadChunk(const cChunkCoords & a_Chunk) override {return false; }
- virtual bool SaveChunk(const cChunkCoords & a_Chunk) override {return true; }
- virtual const AString GetName(void) const override {return "forgetful"; }
-} ;
-
-
-
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// cWorldStorage:
-
-cWorldStorage::cWorldStorage(void) :
- super("cWorldStorage"),
- m_World(NULL),
- m_SaveSchema(NULL)
-{
-}
-
-
-
-
-
-cWorldStorage::~cWorldStorage()
-{
- for (cWSSchemaList::iterator itr = m_Schemas.begin(); itr != m_Schemas.end(); ++itr)
- {
- delete *itr;
- } // for itr - m_Schemas[]
- m_LoadQueue.clear();
- m_SaveQueue.clear();
-}
-
-
-
-
-
-bool cWorldStorage::Start(cWorld * a_World, const AString & a_StorageSchemaName)
-{
- m_World = a_World;
- m_StorageSchemaName = a_StorageSchemaName;
- InitSchemas();
-
- return super::Start();
-}
-
-
-
-
-
-void cWorldStorage::Stop(void)
-{
- WaitForFinish();
-}
-
-
-
-
-
-void cWorldStorage::WaitForFinish(void)
-{
- LOG("Waiting for the world storage to finish saving");
-
- {
- // Cancel all loading requests:
- cCSLock Lock(m_CSQueues);
- m_LoadQueue.clear();
- }
-
- // Wait for the saving to finish:
- WaitForQueuesEmpty();
-
- // Wait for the thread to finish:
- m_ShouldTerminate = true;
- m_Event.Set();
- m_evtRemoved.Set(); // Wake up anybody waiting in the WaitForQueuesEmpty() method
- super::Wait();
- LOG("World storage thread finished");
-}
-
-
-
-
-
-void cWorldStorage::WaitForQueuesEmpty(void)
-{
- cCSLock Lock(m_CSQueues);
- while (!m_ShouldTerminate && (!m_LoadQueue.empty() || !m_SaveQueue.empty()))
- {
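- // Release the lock while waiting, so that the worker thread can drain the queues and signal m_evtRemoved: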
- cCSUnlock Unlock(Lock);
- m_evtRemoved.Wait();
- }
-}
-
-
-
-
-
-int cWorldStorage::GetLoadQueueLength(void)
-{
- cCSLock Lock(m_CSQueues);
- return (int)m_LoadQueue.size();
-}
-
-
-
-
-
-int cWorldStorage::GetSaveQueueLength(void)
-{
- cCSLock Lock(m_CSQueues);
- return (int)m_SaveQueue.size();
-}
-
-
-
-
-
-void cWorldStorage::QueueLoadChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ, bool a_Generate)
-{
- // Queues the chunk for loading; if it cannot be loaded and a_Generate is true, the chunk will be generated
- {
- cCSLock Lock(m_CSQueues);
-
- // Check if already in the queue:
- for (sChunkLoadQueue::iterator itr = m_LoadQueue.begin(); itr != m_LoadQueue.end(); ++itr)
- {
- if ((itr->m_ChunkX == a_ChunkX) && (itr->m_ChunkY == a_ChunkY) && (itr->m_ChunkZ == a_ChunkZ) && (itr->m_Generate == a_Generate))
- {
- return;
- }
- }
- m_LoadQueue.push_back(sChunkLoad(a_ChunkX, a_ChunkY, a_ChunkZ, a_Generate));
- }
-
- m_Event.Set();
-}
-
-
-
-
-
-void cWorldStorage::QueueSaveChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ)
-{
- {
- cCSLock Lock(m_CSQueues);
- m_SaveQueue.remove (cChunkCoords(a_ChunkX, a_ChunkY, a_ChunkZ)); // Don't add twice
- m_SaveQueue.push_back(cChunkCoords(a_ChunkX, a_ChunkY, a_ChunkZ));
- }
- m_Event.Set();
-}
-
-
-
-
-
-void cWorldStorage::QueueSavedMessage(void)
-{
- // Pushes a special coord pair into the queue, signaling that the saved-all message should be emitted instead of an actual save:
- {
- cCSLock Lock(m_CSQueues);
- m_SaveQueue.push_back(cChunkCoords(0, CHUNK_Y_MESSAGE, 0));
- }
- m_Event.Set();
-}
-
-
-
-
-
-void cWorldStorage::UnqueueLoad(int a_ChunkX, int a_ChunkY, int a_ChunkZ)
-{
- cCSLock Lock(m_CSQueues);
- for (sChunkLoadQueue::iterator itr = m_LoadQueue.begin(); itr != m_LoadQueue.end(); ++itr)
- {
- if ((itr->m_ChunkX != a_ChunkX) || (itr->m_ChunkY != a_ChunkY) || (itr->m_ChunkZ != a_ChunkZ))
- {
- continue;
- }
- m_LoadQueue.erase(itr);
- Lock.Unlock();
- m_evtRemoved.Set();
- return;
- } // for itr - m_LoadQueue[]
-}
-
-
-
-
-
-void cWorldStorage::UnqueueSave(const cChunkCoords & a_Chunk)
-{
- {
- cCSLock Lock(m_CSQueues);
- m_SaveQueue.remove(a_Chunk);
- }
- m_evtRemoved.Set();
-}
-
-
-
-
-
-void cWorldStorage::InitSchemas(void)
-{
- // The first schema added is considered the default
- m_Schemas.push_back(new cWSSAnvil (m_World));
- m_Schemas.push_back(new cWSSCompact (m_World));
- m_Schemas.push_back(new cWSSForgetful(m_World));
- // Add new schemas here
-
- if (NoCaseCompare(m_StorageSchemaName, "default") == 0)
- {
- m_SaveSchema = m_Schemas.front();
- return;
- }
- for (cWSSchemaList::iterator itr = m_Schemas.begin(); itr != m_Schemas.end(); ++itr)
- {
- if (NoCaseCompare((*itr)->GetName(), m_StorageSchemaName) == 0)
- {
- m_SaveSchema = *itr;
- return;
- }
- } // for itr - m_Schemas[]
-
- // Unknown schema selected; fall back to the default and let the admin know:
- m_SaveSchema = m_Schemas.front();
- LOGWARNING("Unknown storage schema name \"%s\". Using default (\"%s\"). Available schemas:",
- m_StorageSchemaName.c_str(), m_SaveSchema->GetName().c_str()
- );
- for (cWSSchemaList::iterator itr = m_Schemas.begin(); itr != m_Schemas.end(); ++itr)
- {
- LOGWARNING("\t\"%s\"", (*itr)->GetName().c_str());
- }
-}
-
-
-
-
-
-void cWorldStorage::Execute(void)
-{
- while (!m_ShouldTerminate)
- {
- m_Event.Wait();
-
- // Process both queues until they are empty again:
- bool HasMore;
- do
- {
- HasMore = false;
- if (m_ShouldTerminate)
- {
- return;
- }
-
- HasMore = LoadOneChunk();
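- // Bitwise "|" (not "||") so that SaveOneChunk() runs even when LoadOneChunk() already reported more work: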
- HasMore = HasMore | SaveOneChunk();
- m_evtRemoved.Set();
- } while (HasMore);
- }
-}
-
-
-
-
-
-bool cWorldStorage::LoadOneChunk(void)
-{
- sChunkLoad ToLoad(0, 0, 0, false);
- bool HasMore;
- bool ShouldLoad = false;
- {
- cCSLock Lock(m_CSQueues);
- if (!m_LoadQueue.empty())
- {
- ToLoad = m_LoadQueue.front();
- m_LoadQueue.pop_front();
- ShouldLoad = true;
- }
- HasMore = !m_LoadQueue.empty();
- }
-
- if (ShouldLoad && !LoadChunk(ToLoad.m_ChunkX, ToLoad.m_ChunkY, ToLoad.m_ChunkZ))
- {
- if (ToLoad.m_Generate)
- {
- // The chunk couldn't be loaded, generate it:
- m_World->GetGenerator().QueueGenerateChunk(ToLoad.m_ChunkX, ToLoad.m_ChunkY, ToLoad.m_ChunkZ);
- }
- else
- {
- // TODO: Notify the world that the load has failed:
- // m_World->ChunkLoadFailed(ToLoad.m_ChunkX, ToLoad.m_ChunkY, ToLoad.m_ChunkZ);
- }
- }
- return HasMore;
-}
-
-
-
-
-
-bool cWorldStorage::SaveOneChunk(void)
-{
- cChunkCoords Save(0, 0, 0);
- bool HasMore;
- bool ShouldSave = false;
- {
- cCSLock Lock(m_CSQueues);
- if (!m_SaveQueue.empty())
- {
- Save = m_SaveQueue.front();
- m_SaveQueue.pop_front();
- ShouldSave = true;
- }
- HasMore = !m_SaveQueue.empty();
- }
- if (Save.m_ChunkY == CHUNK_Y_MESSAGE)
- {
- LOGINFO("Saved all chunks in world %s", m_World->GetName().c_str());
- return HasMore;
- }
- if (ShouldSave && m_World->IsChunkValid(Save.m_ChunkX, Save.m_ChunkZ))
- {
- m_World->MarkChunkSaving(Save.m_ChunkX, Save.m_ChunkZ);
- if (m_SaveSchema->SaveChunk(Save))
- {
- m_World->MarkChunkSaved(Save.m_ChunkX, Save.m_ChunkZ);
- }
- }
- return HasMore;
-}
-
-
-
-
-
-bool cWorldStorage::LoadChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ)
-{
- if (m_World->IsChunkValid(a_ChunkX, a_ChunkZ))
- {
- // Already loaded (can happen, since the queue is async)
- return true;
- }
-
- cChunkCoords Coords(a_ChunkX, a_ChunkY, a_ChunkZ);
-
- // First try the schema that is used for saving
- if (m_SaveSchema->LoadChunk(Coords))
- {
- return true;
- }
-
- // If it didn't have the chunk, try all the other schemas:
- for (cWSSchemaList::iterator itr = m_Schemas.begin(); itr != m_Schemas.end(); ++itr)
- {
- if (((*itr) != m_SaveSchema) && (*itr)->LoadChunk(Coords))
- {
- return true;
- }
- }
-
- // Notify the chunk owner that the chunk failed to load (sets cChunk::m_HasLoadFailed to true):
- m_World->ChunkLoadFailed(a_ChunkX, a_ChunkY, a_ChunkZ);
-
- return false;
-}
-
-
-
-
-
diff --git a/source/WorldStorage/WorldStorage.h b/source/WorldStorage/WorldStorage.h
deleted file mode 100644
index bf8dbd3d5..000000000
--- a/source/WorldStorage/WorldStorage.h
+++ /dev/null
@@ -1,135 +0,0 @@
-
-// WorldStorage.h
-
-// Interfaces to the cWorldStorage class representing the chunk loading / saving thread
-// This class decides which storage schema to use for saving; it queries all available schemas for loading
-// Also declares the base class for all storage schemas, cWSSchema
-// The helper serialization class cJsonChunkSerializer is declared in WSSCompact.h
-
-
-
-
-
-#pragma once
-#ifndef WORLDSTORAGE_H_INCLUDED
-#define WORLDSTORAGE_H_INCLUDED
-
-#include "../ChunkDef.h"
-#include "../OSSupport/IsThread.h"
-#include <json/json.h>
-
-
-
-
-
-// fwd:
-class cWorld;
-
-
-
-
-
-/// Interface that all the world storage schemas need to implement
-class cWSSchema abstract
-{
-public:
- cWSSchema(cWorld * a_World) : m_World(a_World) {}
- virtual ~cWSSchema() {} // Force the descendants' destructors to be virtual
-
- virtual bool LoadChunk(const cChunkCoords & a_Chunk) = 0;
- virtual bool SaveChunk(const cChunkCoords & a_Chunk) = 0;
- virtual const AString GetName(void) const = 0;
-
-protected:
-
- cWorld * m_World;
-} ;
-
-typedef std::list<cWSSchema *> cWSSchemaList;
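-// (For a minimal no-op implementation of this interface, see cWSSForgetful in WorldStorage.cpp.)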
-
-
-
-
-
-/// The actual world storage class
-class cWorldStorage :
- public cIsThread
-{
- typedef cIsThread super;
-
-public:
-
- cWorldStorage(void);
- ~cWorldStorage();
-
- void QueueLoadChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ, bool a_Generate); // Queues the chunk for loading; if not loaded, the chunk will be generated if a_Generate is true
- void QueueSaveChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ);
-
- /// Signals that a message should be output to the console when all the chunks have been saved
- void QueueSavedMessage(void);
-
- /// Loads the chunk specified; returns true on success, false on failure
- bool LoadChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ);
-
- void UnqueueLoad(int a_ChunkX, int a_ChunkY, int a_ChunkZ);
- void UnqueueSave(const cChunkCoords & a_Chunk);
-
- bool Start(cWorld * a_World, const AString & a_StorageSchemaName); // Hides cIsThread's Start(); we need to provide args
- void Stop(void); // Hides cIsThread's Stop(); we need to signal the event
- void WaitForFinish(void);
- void WaitForQueuesEmpty(void);
-
- int GetLoadQueueLength(void);
- int GetSaveQueueLength(void);
-
-protected:
-
- struct sChunkLoad
- {
- int m_ChunkX;
- int m_ChunkY;
- int m_ChunkZ;
- bool m_Generate; // If true, the chunk will be generated if it cannot be loaded
-
- sChunkLoad(int a_ChunkX, int a_ChunkY, int a_ChunkZ, bool a_Generate) : m_ChunkX(a_ChunkX), m_ChunkY(a_ChunkY), m_ChunkZ(a_ChunkZ), m_Generate(a_Generate) {}
- } ;
-
- typedef std::list<sChunkLoad> sChunkLoadQueue;
-
- cWorld * m_World;
- AString m_StorageSchemaName;
-
- // Both queues are locked by the same CS
- cCriticalSection m_CSQueues;
- sChunkLoadQueue m_LoadQueue;
- cChunkCoordsList m_SaveQueue;
-
- cEvent m_Event; // Set when there's any addition to the queues
- cEvent m_evtRemoved; // Set when an item has been removed from the queue, either by the worker thread or the Unqueue methods
-
- /// All the storage schemas (all used for loading)
- cWSSchemaList m_Schemas;
-
- /// The one storage schema used for saving
- cWSSchema * m_SaveSchema;
-
- void InitSchemas(void);
-
- virtual void Execute(void) override;
-
- /// Loads one chunk from the queue (if any queued); returns true if there are more chunks in the load queue
- bool LoadOneChunk(void);
-
- /// Saves one chunk from the queue (if any queued); returns true if there are more chunks in the save queue
- bool SaveOneChunk(void);
-} ;
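-
-// A hypothetical usage sketch (the real call sites live in cWorld and are not shown here); it
-// assumes a valid cWorld * World and uses only the methods declared above:
-//
-//   cWorldStorage Storage;
-//   Storage.Start(World, "default");        // "default" selects the first registered schema (Anvil)
-//   Storage.QueueLoadChunk(0, 0, 0, true);  // load chunk [0, 0, 0], generating it if it cannot be loaded
-//   Storage.QueueSaveChunk(0, 0, 0);
-//   Storage.QueueSavedMessage();            // log the saved-all message once the save queue drains
-//   Storage.WaitForFinish();                // flush the save queue and stop the storage thread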
-
-
-
-
-
-#endif // WORLDSTORAGE_H_INCLUDED
-
-
-
-