author     Howaner <franzi.moos@googlemail.com>  2014-09-12 19:13:46 +0200
committer  Howaner <franzi.moos@googlemail.com>  2014-09-12 19:13:46 +0200
commit     1223a24d3c87832c08545e6f3e8e4c51ff1e7e3f (patch)
tree       f7cf72e49a7ca60d10af12ad9ff70910e089b028 /src/WorldStorage/WSSAnvil.cpp
parent     Renamed SetWalkSpeed() to SetRelativeWalkSpeed() (diff)
parent     Fixed iron ore drop. (diff)
Diffstat (limited to 'src/WorldStorage/WSSAnvil.cpp')
-rw-r--r--   src/WorldStorage/WSSAnvil.cpp   72
1 file changed, 51 insertions(+), 21 deletions(-)
diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp
index e79cc291d..fe309ce4e 100644
--- a/src/WorldStorage/WSSAnvil.cpp
+++ b/src/WorldStorage/WSSAnvil.cpp
@@ -66,8 +66,17 @@ Since only the header is actually in the memory, this number can be high, but st
*/
#define MAX_MCA_FILES 32
-/// The maximum size of an inflated chunk; raw chunk data is 192 KiB, allow 64 KiB more of entities
-#define CHUNK_INFLATE_MAX 256 KiB
+#define LOAD_FAILED(CHX, CHZ) \
+ { \
+ const int RegionX = FAST_FLOOR_DIV(CHX, 32); \
+ const int RegionZ = FAST_FLOOR_DIV(CHZ, 32); \
+ LOGERROR("%s (%d): Loading chunk [%d, %d] from file r.%d.%d.mca failed. " \
+ "The server will now abort in order to avoid further data loss. " \
+ "Please add the reported file and this message to the issue report.", \
+ __FUNCTION__, __LINE__, CHX, CHZ, RegionX, RegionZ \
+ ); \
+ *((volatile int *)0) = 0; /* Crash intentionally */ \
+ }
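
The LOAD_FAILED macro uses FAST_FLOOR_DIV so that negative chunk coordinates still map to the correct r.X.Z.mca region file before the deliberate null-pointer write aborts the server and preserves the corrupt file for the issue report. A minimal sketch of that floor division, assuming semantics matching Cuberite's FAST_FLOOR_DIV macro; the helper name below is illustrative:

// Floor division rounds towards negative infinity, unlike C++'s truncating
// division, so chunks -32..-1 map to region -1 rather than region 0.
static int FloorDiv(int a_Num, int a_Den)
{
	int Quot = a_Num / a_Den;
	if (((a_Num % a_Den) != 0) && ((a_Num < 0) != (a_Den < 0)))
	{
		Quot -= 1;
	}
	return Quot;
}
// Example: chunk [-1, 31] -> FloorDiv(-1, 32) == -1, FloorDiv(31, 32) == 0,
// i.e. region file r.-1.0.mca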
@@ -248,29 +257,22 @@ cWSSAnvil::cMCAFile * cWSSAnvil::LoadMCAFile(const cChunkCoords & a_Chunk)
bool cWSSAnvil::LoadChunkFromData(const cChunkCoords & a_Chunk, const AString & a_Data)
{
- // Decompress the data:
- char Uncompressed[CHUNK_INFLATE_MAX];
- z_stream strm;
- strm.zalloc = (alloc_func)NULL;
- strm.zfree = (free_func)NULL;
- strm.opaque = NULL;
- inflateInit(&strm);
- strm.next_out = (Bytef *)Uncompressed;
- strm.avail_out = sizeof(Uncompressed);
- strm.next_in = (Bytef *)a_Data.data();
- strm.avail_in = (uInt)a_Data.size();
- int res = inflate(&strm, Z_FINISH);
- inflateEnd(&strm);
- if (res != Z_STREAM_END)
+ // Uncompress the data:
+ AString Uncompressed;
+ int res = InflateString(a_Data.data(), a_Data.size(), Uncompressed);
+ if (res != Z_OK)
{
+ LOGWARNING("Uncompressing chunk [%d, %d] failed: %d", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, res);
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
// Parse the NBT data:
- cParsedNBT NBT(Uncompressed, strm.total_out);
+ cParsedNBT NBT(Uncompressed.data(), Uncompressed.size());
if (!NBT.IsValid())
{
// NBT Parsing failed
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
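
This rewrite drops the fixed 256 KiB stack buffer in favour of InflateString, which inflates into a growable AString (Cuberite's std::string typedef), so chunks whose entity data exceeds the old limit no longer fail to decompress. A rough sketch of what such a helper does with zlib, assuming the three-argument signature used above; Cuberite's actual implementation lives in its string-compression utilities:

#include <zlib.h>

// Inflate a zlib stream into a growable string; returns Z_OK on success.
static int InflateToString(const char * a_Data, size_t a_Length, AString & a_Out)
{
	z_stream strm = {};  // zero-initialize zalloc / zfree / opaque
	if (inflateInit(&strm) != Z_OK)
	{
		return Z_STREAM_ERROR;
	}
	strm.next_in = (Bytef *)a_Data;
	strm.avail_in = (uInt)a_Length;
	char Buffer[64 * 1024];
	int res;
	do
	{
		strm.next_out = (Bytef *)Buffer;
		strm.avail_out = sizeof(Buffer);
		res = inflate(&strm, Z_NO_FLUSH);
		if ((res != Z_OK) && (res != Z_STREAM_END))
		{
			break;  // Z_DATA_ERROR / Z_BUF_ERROR on corrupt or truncated input
		}
		a_Out.append(Buffer, sizeof(Buffer) - strm.avail_out);
	} while (res != Z_STREAM_END);
	inflateEnd(&strm);
	return (res == Z_STREAM_END) ? Z_OK : res;
}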
@@ -317,11 +319,19 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT
int Level = a_NBT.FindChildByName(0, "Level");
if (Level < 0)
{
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
int Sections = a_NBT.FindChildByName(Level, "Sections");
- if ((Sections < 0) || (a_NBT.GetType(Sections) != TAG_List) || (a_NBT.GetChildrenType(Sections) != TAG_Compound))
+ if ((Sections < 0) || (a_NBT.GetType(Sections) != TAG_List))
+ {
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
+ return false;
+ }
+ eTagType SectionsType = a_NBT.GetChildrenType(Sections);
+ if ((SectionsType != TAG_Compound) && (SectionsType != TAG_End))
{
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
for (int Child = a_NBT.GetFirstChild(Sections); Child >= 0; Child = a_NBT.GetNextSibling(Child))
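
The relaxed check matters because an empty NBT list stores TAG_End as its element type, so a chunk with no populated sections used to be rejected outright; it is now accepted, and the loop above simply does nothing for an empty list since there is no first child to iterate. For illustration only, the on-disk NBT layout of an empty "Sections" list looks like this (hex sketch):

09                 TAG_List
00 08 "Sections"   name length (big-endian) + name
00                 element tag type = TAG_End, i.e. empty list
00 00 00 00        element count = 0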
@@ -585,7 +595,7 @@ void cWSSAnvil::LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntities, con
// Get the BlockEntity's position
int x, y, z;
- if (!GetBlockEntityNBTPos(a_NBT, Child, x, y, z))
+ if (!GetBlockEntityNBTPos(a_NBT, Child, x, y, z) || (y < 0) || (y >= cChunkDef::Height))
{
LOGWARNING("Bad block entity, missing the coords. Will be ignored.");
continue;
@@ -613,6 +623,8 @@ void cWSSAnvil::LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntities, con
cBlockEntity * cWSSAnvil::LoadBlockEntityFromNBT(const cParsedNBT & a_NBT, int a_Tag, int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE a_BlockType, NIBBLETYPE a_BlockMeta)
{
+ ASSERT((a_BlockY >= 0) && (a_BlockY < cChunkDef::Height));
+
// Load the specific BlockEntity type:
switch (a_BlockType)
{
@@ -2811,30 +2823,42 @@ bool cWSSAnvil::cMCAFile::GetChunkData(const cChunkCoords & a_Chunk, AString & a
}
unsigned ChunkLocation = ntohl(m_Header[LocalX + 32 * LocalZ]);
unsigned ChunkOffset = ChunkLocation >> 8;
+ if (ChunkOffset < 2)
+ {
+ return false;
+ }
m_File.Seek((int)ChunkOffset * 4096);
int ChunkSize = 0;
if (m_File.Read(&ChunkSize, 4) != 4)
{
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
ChunkSize = ntohl((u_long)ChunkSize);
char CompressionType = 0;
if (m_File.Read(&CompressionType, 1) != 1)
{
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
if (CompressionType != 2)
{
// Chunk is in an unknown compression
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
return false;
}
ChunkSize--;
// HACK: This depends on the internal knowledge that AString's data() function returns the internal buffer directly
a_Data.assign(ChunkSize, '\0');
- return (m_File.Read((void *)a_Data.data(), ChunkSize) == ChunkSize);
+ if (m_File.Read((void *)a_Data.data(), ChunkSize) == ChunkSize)
+ {
+ return true;
+ }
+ LOAD_FAILED(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ);
+ return false;
}
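
The new ChunkOffset check reflects the Anvil region layout: the first two 4 KiB sectors of every .mca file hold the location table and the timestamps, so a valid chunk can never start before sector 2, and an all-zero location entry just means the chunk has not been generated yet, which is not a load failure. A sketch of the layout and of decoding one location entry (variable names here are illustrative):

// Anvil region file, 4096-byte sectors:
//   sector 0:  1024 location entries, 4 bytes each, big-endian:
//              (SectorOffset << 8) | SectorCount
//   sector 1:  1024 big-endian timestamps
//   sector 2+: chunk payloads: 4-byte length, 1-byte compression type, data
unsigned Location = ntohl(Header[LocalX + 32 * LocalZ]);
unsigned Offset   = Location >> 8;    // first sector of the chunk, 0 if not generated
unsigned Sectors  = Location & 0xFF;  // number of sectors the chunk occupies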
@@ -2889,7 +2913,13 @@ bool cWSSAnvil::cMCAFile::SetChunkData(const cChunkCoords & a_Chunk, const AStri
// Store the header:
ChunkSize = ((u_long)a_Data.size() + MCA_CHUNK_HEADER_LENGTH + 4095) / 4096; // Round data size *up* to nearest 4KB sector, make it a sector number
- ASSERT(ChunkSize < 256);
+ if (ChunkSize > 255)
+ {
+ LOGWARNING("Cannot save chunk [%d, %d], the data is too large (%u KiB, maximum is 1024 KiB). Remove some entities and retry.",
+ a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, (unsigned)(ChunkSize * 4)
+ );
+ return false;
+ }
m_Header[LocalX + 32 * LocalZ] = htonl((ChunkSector << 8) | ChunkSize);
if (m_File.Seek(0) < 0)
{
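
The former ASSERT only fired in debug builds; in a release build an oversized chunk would have overflowed into the sector-offset bits of the header word, because the sector count occupies only the low byte and therefore tops out at 255. A worked example of the rounding, assuming MCA_CHUNK_HEADER_LENGTH is the 5-byte length-plus-compression-type prefix read back in GetChunkData():

// ChunkSize = (DataSize + 5 + 4095) / 4096, rounded up to whole sectors:
//   DataSize = 1200000 bytes -> (1200000 + 5 + 4095) / 4096 = 293 sectors -> rejected (> 255)
//   DataSize =  500000 bytes -> ( 500000 + 5 + 4095) / 4096 = 123 sectors -> stored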