Diffstat (limited to 'src/WorldStorage')
-rw-r--r--  src/WorldStorage/CMakeLists.txt           35
-rw-r--r--  src/WorldStorage/NBTChunkSerializer.cpp    3
-rw-r--r--  src/WorldStorage/WSSAnvil.cpp              2
-rw-r--r--  src/WorldStorage/WSSAnvil.h                2
-rw-r--r--  src/WorldStorage/WSSCompact.cpp          108
-rw-r--r--  src/WorldStorage/WorldStorage.cpp          8
-rw-r--r--  src/WorldStorage/WorldStorage.h            7
7 files changed, 98 insertions, 67 deletions
diff --git a/src/WorldStorage/CMakeLists.txt b/src/WorldStorage/CMakeLists.txt
index 2844f7fe5..a00ff3b2f 100644
--- a/src/WorldStorage/CMakeLists.txt
+++ b/src/WorldStorage/CMakeLists.txt
@@ -4,11 +4,34 @@ project (MCServer)
include_directories ("${PROJECT_SOURCE_DIR}/../")
-file(GLOB SOURCE
- "*.cpp"
- "*.h"
-)
+SET (SRCS
+ EnchantmentSerializer.cpp
+ FastNBT.cpp
+ FireworksSerializer.cpp
+ MapSerializer.cpp
+ NBTChunkSerializer.cpp
+ SchematicFileSerializer.cpp
+ ScoreboardSerializer.cpp
+ StatSerializer.cpp
+ WSSAnvil.cpp
+ WSSCompact.cpp
+ WorldStorage.cpp)
-add_library(WorldStorage ${SOURCE})
+SET (HDRS
+ EnchantmentSerializer.h
+ FastNBT.h
+ FireworksSerializer.h
+ MapSerializer.h
+ NBTChunkSerializer.h
+ SchematicFileSerializer.h
+ ScoreboardSerializer.h
+ StatSerializer.h
+ WSSAnvil.h
+ WSSCompact.h
+ WorldStorage.h)
-target_link_libraries(WorldStorage OSSupport)
+if(NOT MSVC)
+ add_library(WorldStorage ${SRCS} ${HDRS})
+
+ target_link_libraries(WorldStorage OSSupport)
+endif()
diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp
index 702690872..b7a3d40ce 100644
--- a/src/WorldStorage/NBTChunkSerializer.cpp
+++ b/src/WorldStorage/NBTChunkSerializer.cpp
@@ -614,7 +614,8 @@ void cNBTChunkSerializer::AddProjectileEntity(cProjectileEntity * a_Projectile)
m_Writer.AddInt("EffectDuration", (Int16)Potion->GetEntityEffect().GetDuration());
m_Writer.AddShort("EffectIntensity", Potion->GetEntityEffect().GetIntensity());
m_Writer.AddDouble("EffectDistanceModifier", Potion->GetEntityEffect().GetDistanceModifier());
- m_Writer.AddInt("PotionName", Potion->GetPotionParticleType());
+ m_Writer.AddInt("PotionName", Potion->GetPotionColor());
+ break;
}
case cProjectileEntity::pkGhastFireball:
{
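
Note on the hunk above: the added break matters as much as the rename. Without it, the pkSplashPotion case falls straight through into pkGhastFireball and writes that projectile's tags on top of the potion's. A minimal stand-alone sketch of the pattern (the enum and the output below are placeholders, not the real cNBTChunkSerializer API):

#include <iostream>

enum class eProjectileKind { SplashPotion, GhastFireball };

// Placeholder for the NBT writer used in the real serializer.
static void Serialize(eProjectileKind a_Kind)
{
	switch (a_Kind)
	{
		case eProjectileKind::SplashPotion:
		{
			std::cout << "PotionName\n";
			break;  // without this, execution falls through into the next case
		}
		case eProjectileKind::GhastFireball:
		{
			std::cout << "ExplosionPower\n";
			break;
		}
	}
}

int main()
{
	Serialize(eProjectileKind::SplashPotion);  // prints only "PotionName"
	return 0;
}
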
diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp
index 1dbf20bc0..a5c1fefda 100644
--- a/src/WorldStorage/WSSAnvil.cpp
+++ b/src/WorldStorage/WSSAnvil.cpp
@@ -1681,7 +1681,7 @@ void cWSSAnvil::LoadSplashPotionFromNBT(cEntityList & a_Entities, const cParsedN
SplashPotion->SetEntityEffectType((cEntityEffect::eType) a_NBT.FindChildByName(a_TagIdx, "EffectType"));
SplashPotion->SetEntityEffect(cEntityEffect(EffectDuration, EffectIntensity, EffectDistanceModifier));
- SplashPotion->SetPotionParticleType(a_NBT.FindChildByName(a_TagIdx, "PotionName"));
+ SplashPotion->SetPotionColor(a_NBT.FindChildByName(a_TagIdx, "PotionName"));
// Store the new splash potion in the entities list:
a_Entities.push_back(SplashPotion.release());
diff --git a/src/WorldStorage/WSSAnvil.h b/src/WorldStorage/WSSAnvil.h
index 5b629f017..9f8714404 100644
--- a/src/WorldStorage/WSSAnvil.h
+++ b/src/WorldStorage/WSSAnvil.h
@@ -153,7 +153,7 @@ protected:
void LoadPickupFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
void LoadTNTFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
void LoadExpOrbFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
- void LoadHangingFromNBT (cHangingEntity & a_Hanging,const cParsedNBT & a_NBT, int a_TagIdx);
+ void LoadHangingFromNBT (cHangingEntity & a_Hanging, const cParsedNBT & a_NBT, int a_TagIdx);
void LoadItemFrameFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
void LoadMinecartRFromNBT (cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx);
diff --git a/src/WorldStorage/WSSCompact.cpp b/src/WorldStorage/WSSCompact.cpp
index ff43b6905..5382a3e01 100644
--- a/src/WorldStorage/WSSCompact.cpp
+++ b/src/WorldStorage/WSSCompact.cpp
@@ -196,7 +196,7 @@ cWSSCompact::cPAKFile * cWSSCompact::LoadPAKFile(const cChunkCoords & a_Chunk)
// Load it anew:
AString FileName;
- Printf(FileName, "%s/X%i_Z%i.pak", m_World->GetName().c_str(), LayerX, LayerZ );
+ Printf(FileName, "%s/X%i_Z%i.pak", m_World->GetName().c_str(), LayerX, LayerZ);
cPAKFile * f = new cPAKFile(FileName, LayerX, LayerZ, m_CompressionFactor);
if (f == NULL)
{
@@ -271,12 +271,12 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
Json::Value AllChests = a_Value.get("Chests", Json::nullValue);
if (!AllChests.empty())
{
- for (Json::Value::iterator itr = AllChests.begin(); itr != AllChests.end(); ++itr )
+ for (Json::Value::iterator itr = AllChests.begin(); itr != AllChests.end(); ++itr)
{
std::auto_ptr<cChestEntity> ChestEntity(new cChestEntity(0, 0, 0, a_World, E_BLOCK_CHEST));
if (!ChestEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING CHEST FROM JSON!" );
+ LOGWARNING("ERROR READING CHEST FROM JSON!");
}
else
{
@@ -292,7 +292,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cDispenserEntity> DispenserEntity(new cDispenserEntity(0, 0, 0, a_World));
if (!DispenserEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING DISPENSER FROM JSON!" );
+ LOGWARNING("ERROR READING DISPENSER FROM JSON!");
}
else
{
@@ -307,7 +307,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cFlowerPotEntity> FlowerPotEntity(new cFlowerPotEntity(0, 0, 0, a_World));
if (!FlowerPotEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING FLOWERPOT FROM JSON!" );
+ LOGWARNING("ERROR READING FLOWERPOT FROM JSON!");
}
else
{
@@ -323,7 +323,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cFurnaceEntity> FurnaceEntity(new cFurnaceEntity(0, 0, 0, E_BLOCK_FURNACE, 0, a_World));
if (!FurnaceEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING FURNACE FROM JSON!" );
+ LOGWARNING("ERROR READING FURNACE FROM JSON!");
}
else
{
@@ -353,7 +353,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cNoteEntity> NoteEntity(new cNoteEntity(0, 0, 0, a_World));
if (!NoteEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING NOTE BLOCK FROM JSON!" );
+ LOGWARNING("ERROR READING NOTE BLOCK FROM JSON!");
}
else
{
@@ -368,7 +368,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cJukeboxEntity> JukeboxEntity(new cJukeboxEntity(0, 0, 0, a_World));
if (!JukeboxEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING JUKEBOX FROM JSON!" );
+ LOGWARNING("ERROR READING JUKEBOX FROM JSON!");
}
else
{
@@ -383,7 +383,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cCommandBlockEntity> CommandBlockEntity(new cCommandBlockEntity(0, 0, 0, a_World));
if (!CommandBlockEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING COMMAND BLOCK FROM JSON!" );
+ LOGWARNING("ERROR READING COMMAND BLOCK FROM JSON!");
}
else
{
@@ -398,7 +398,7 @@ void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_En
std::auto_ptr<cMobHeadEntity> MobHeadEntity(new cMobHeadEntity(0, 0, 0, a_World));
if (!MobHeadEntity->LoadFromJson(*itr))
{
- LOGWARNING("ERROR READING MOB HEAD FROM JSON!" );
+ LOGWARNING("ERROR READING MOB HEAD FROM JSON!");
}
else
{
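
The branches in LoadEntitiesFromJson cleaned up above (chests through mob heads) all share one shape: construct a candidate block entity, try LoadFromJson, and hand ownership to the output list only on success, so a failed parse just logs a warning and discards the half-built object. A compressed sketch of that pattern, using std::unique_ptr in place of the now-deprecated std::auto_ptr and placeholder types instead of the real entity classes:

#include <memory>
#include <vector>

struct sJsonValue { bool m_Valid = true; };  // stand-in for Json::Value

struct cBlockEntityStub
{
	// Returns false when the JSON does not describe a valid entity.
	bool LoadFromJson(const sJsonValue & a_Value) { return a_Value.m_Valid; }
};

static void LoadOne(const sJsonValue & a_Value, std::vector<cBlockEntityStub *> & a_Out)
{
	std::unique_ptr<cBlockEntityStub> Candidate(new cBlockEntityStub);
	if (!Candidate->LoadFromJson(a_Value))
	{
		return;  // the unique_ptr frees the rejected entity; the caller only logs a warning
	}
	a_Out.push_back(Candidate.release());  // success: the list takes ownership
}

int main()
{
	std::vector<cBlockEntityStub *> Entities;
	LoadOne(sJsonValue{}, Entities);
	bool Ok = (Entities.size() == 1);
	for (cBlockEntityStub * Entity : Entities)
	{
		delete Entity;
	}
	return Ok ? 0 : 1;
}
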
@@ -427,8 +427,8 @@ cWSSCompact::cPAKFile::cPAKFile(const AString & a_FileName, int a_LayerX, int a_
m_LayerX(a_LayerX),
m_LayerZ(a_LayerZ),
m_NumDirty(0),
- m_ChunkVersion( CHUNK_VERSION ), // Init with latest version
- m_PakVersion( PAK_VERSION )
+ m_ChunkVersion( CHUNK_VERSION), // Init with latest version
+ m_PakVersion( PAK_VERSION)
{
cFile f;
if (!f.Open(m_FileName, cFile::fmRead))
@@ -445,18 +445,24 @@ cWSSCompact::cPAKFile::cPAKFile(const AString & a_FileName, int a_LayerX, int a_
}
READ(m_ChunkVersion);
- switch( m_ChunkVersion )
+ switch (m_ChunkVersion)
{
- case 1:
- m_ChunkSize.Set(16, 128, 16);
- break;
- case 2:
- case 3:
- m_ChunkSize.Set(16, 256, 16);
- break;
- default:
- LOGERROR("File \"%s\" is in an unknown chunk format (%d)", m_FileName.c_str(), m_ChunkVersion);
- return;
+ case 1:
+ {
+ m_ChunkSize.Set(16, 128, 16);
+ break;
+ }
+ case 2:
+ case 3:
+ {
+ m_ChunkSize.Set(16, 256, 16);
+ break;
+ }
+ default:
+ {
+ LOGERROR("File \"%s\" is in an unknown chunk format (%d)", m_FileName.c_str(), m_ChunkVersion);
+ return;
+ }
};
short NumChunks = 0;
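
For orientation on the sizes that switch selects: the Compact format stores one byte per block type plus three nibble (half-byte) arrays for metadata, block light and sky light, so a version-1 chunk carries 16 x 128 x 16 x 2.5 = 81920 bytes of block data, while versions 2 and 3 carry 16 x 256 x 16 x 2.5 = 163840 bytes (which is what cChunkDef::BlockDataSize works out to). A small sketch of that arithmetic, with an illustrative helper rather than the real constant:

#include <cstdio>

// Raw block payload per chunk: one byte per block type plus three
// nibble arrays (metadata, block light, sky light) at half a byte each.
constexpr int ChunkDataBytes(int a_Width, int a_Height, int a_Depth)
{
	return a_Width * a_Height * a_Depth + 3 * (a_Width * a_Height * a_Depth) / 2;
}

int main()
{
	std::printf("version 1:      %d bytes\n", ChunkDataBytes(16, 128, 16));  //  81920
	std::printf("versions 2, 3:  %d bytes\n", ChunkDataBytes(16, 256, 16));  // 163840
	return 0;
}
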
@@ -486,12 +492,12 @@ cWSSCompact::cPAKFile::cPAKFile(const AString & a_FileName, int a_LayerX, int a_
return;
}
- if( m_ChunkVersion == 1 ) // Convert chunks to version 2
+ if (m_ChunkVersion == 1) // Convert chunks to version 2
{
UpdateChunk1To2();
}
#if AXIS_ORDER == AXIS_ORDER_XZY
- if( m_ChunkVersion == 2 ) // Convert chunks to version 3
+ if (m_ChunkVersion == 2) // Convert chunks to version 3
{
UpdateChunk2To3();
}
@@ -574,9 +580,9 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
{
sChunkHeader * Header = *itr;
- if( ChunksConverted % 32 == 0 )
+ if (ChunksConverted % 32 == 0)
{
- LOGINFO("Updating \"%s\" version 1 to version 2: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size() );
+ LOGINFO("Updating \"%s\" version 1 to version 2: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size());
}
ChunksConverted++;
@@ -627,9 +633,9 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
char ConvertedData[cChunkDef::BlockDataSize];
int Index = 0;
unsigned int InChunkOffset = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z )
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)
{
- for( int y = 0; y < 128; ++y )
+ for (int y = 0; y < 128; ++y)
{
ConvertedData[Index++] = UncompressedData[y + z * 128 + x * 128 * 16 + InChunkOffset];
}
@@ -638,9 +644,9 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
Index += 128;
}
InChunkOffset += (16 * 128 * 16);
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Metadata
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Metadata
{
- for( int y = 0; y < 64; ++y )
+ for (int y = 0; y < 64; ++y)
{
ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
}
@@ -648,9 +654,9 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
Index += 64;
}
InChunkOffset += (16 * 128 * 16) / 2;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Block light
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Block light
{
- for( int y = 0; y < 64; ++y )
+ for (int y = 0; y < 64; ++y)
{
ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
}
@@ -658,9 +664,9 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
Index += 64;
}
InChunkOffset += (16*128*16)/2;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) // Sky light
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Sky light
{
- for( int y = 0; y < 64; ++y )
+ for (int y = 0; y < 64; ++y)
{
ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
}
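
The four conversion loops in UpdateChunk1To2 all do the same thing: copy each 128-high column (64 bytes, for the nibble arrays) into the lower half of a 256-high column and skip over the upper half. A stand-alone sketch of the block-type pass, with zero-initialised std::vector buffers standing in for the real ones (the skipped upper half represents the empty space above the old build height):

#include <vector>

// Expand version-1 block types (16 x 128 x 16, Y changing fastest) into the
// version-2 layout (16 x 256 x 16): each old column fills the lower half of
// the new one, the upper 128 entries of each column stay zero (air).
static std::vector<char> ExpandColumns(const std::vector<char> & a_Old)
{
	std::vector<char> New(16 * 256 * 16, 0);
	size_t Index = 0;
	for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)
	{
		for (int y = 0; y < 128; ++y)
		{
			New[Index++] = a_Old[y + z * 128 + x * 128 * 16];
		}
		Index += 128;  // leave the top half of the column untouched
	}
	return New;
}

int main()
{
	std::vector<char> Old(16 * 128 * 16, 1);
	return (ExpandColumns(Old).size() == 16 * 256 * 16) ? 0 : 1;
}
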
@@ -674,7 +680,7 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
// Add JSON data afterwards
if (UncompressedData.size() > InChunkOffset)
{
- Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end() );
+ Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end());
}
// Re-compress data
@@ -702,7 +708,7 @@ void cWSSCompact::cPAKFile::UpdateChunk1To2()
m_ChunkVersion = 2;
SynchronizeFile();
- LOGINFO("Updated \"%s\" version 1 to version 2", m_FileName.c_str() );
+ LOGINFO("Updated \"%s\" version 1 to version 2", m_FileName.c_str());
}
@@ -718,9 +724,9 @@ void cWSSCompact::cPAKFile::UpdateChunk2To3()
{
sChunkHeader * Header = *itr;
- if( ChunksConverted % 32 == 0 )
+ if (ChunksConverted % 32 == 0)
{
- LOGINFO("Updating \"%s\" version 2 to version 3: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size() );
+ LOGINFO("Updating \"%s\" version 2 to version 3: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size());
}
ChunksConverted++;
@@ -771,10 +777,10 @@ void cWSSCompact::cPAKFile::UpdateChunk2To3()
// Cannot use cChunk::MakeIndex because it might change again?????????
// For compatibility, use what we know is current
- #define MAKE_3_INDEX( x, y, z ) ( x + (z * 16) + (y * 16 * 16) )
+ #define MAKE_3_INDEX( x, y, z) ( x + (z * 16) + (y * 16 * 16))
unsigned int InChunkOffset = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y ) // YZX Loop order is important, in 1.1 Y was first then Z then X
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y) // YZX Loop order is important, in 1.1 Y was first then Z then X
{
ConvertedData[ MAKE_3_INDEX(x, y, z) ] = UncompressedData[InChunkOffset];
++InChunkOffset;
@@ -782,25 +788,25 @@ void cWSSCompact::cPAKFile::UpdateChunk2To3()
unsigned int index2 = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
{
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
+ ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4);
++index2;
}
InChunkOffset += index2 / 2;
index2 = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
{
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
+ ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4);
++index2;
}
InChunkOffset += index2 / 2;
index2 = 0;
- for( int x = 0; x < 16; ++x ) for( int z = 0; z < 16; ++z ) for( int y = 0; y < 256; ++y )
+ for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
{
- ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4) ) & 0x0f ) << ((x&1)*4);
+ ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4);
++index2;
}
InChunkOffset += index2 / 2;
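
The UpdateChunk2To3 loops above undo the old YZX ordering: version-2 data was written with Y varying fastest (index = y + z * 256 + x * 256 * 16), while version 3, and MAKE_3_INDEX, put X fastest (index = x + z * 16 + y * 16 * 16). A stand-alone sketch of that remap for the block-type array only; the real function also repacks the three nibble arrays and preserves the trailing JSON:

#include <vector>

// Reorder a 16 x 256 x 16 block array from the version-2 YZX layout
// (Y fastest) to the version-3 XZY layout (X fastest).
static std::vector<char> ReorderYzxToXzy(const std::vector<char> & a_Old)
{
	std::vector<char> New(16 * 256 * 16, 0);
	size_t OldIndex = 0;
	for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
	{
		New[x + z * 16 + y * 16 * 16] = a_Old[OldIndex++];  // same mapping as MAKE_3_INDEX
	}
	return New;
}

int main()
{
	std::vector<char> Old(16 * 256 * 16);
	for (size_t i = 0; i < Old.size(); ++i)
	{
		Old[i] = static_cast<char>(i);
	}
	// The block at x = 1, z = 2, y = 3 sat at 3 + 2 * 256 + 1 * 4096 in the old layout.
	return (ReorderYzxToXzy(Old)[1 + 2 * 16 + 3 * 256] == Old[3 + 2 * 256 + 1 * 4096]) ? 0 : 1;
}
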
@@ -810,7 +816,7 @@ void cWSSCompact::cPAKFile::UpdateChunk2To3()
// Add JSON data afterwards
if (UncompressedData.size() > InChunkOffset)
{
- Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end() );
+ Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end());
}
// Re-compress data
@@ -838,7 +844,7 @@ void cWSSCompact::cPAKFile::UpdateChunk2To3()
m_ChunkVersion = 3;
SynchronizeFile();
- LOGINFO("Updated \"%s\" version 2 to version 3", m_FileName.c_str() );
+ LOGINFO("Updated \"%s\" version 2 to version 3", m_FileName.c_str());
}
@@ -887,7 +893,7 @@ bool cWSSCompact::LoadChunkFromData(const cChunkCoords & a_Chunk, int a_Uncompre
{
Json::Value root; // will contain the root value after parsing.
Json::Reader reader;
- if ( !reader.parse( UncompressedData.data() + cChunkDef::BlockDataSize, root, false ) )
+ if (!reader.parse( UncompressedData.data() + cChunkDef::BlockDataSize, root, false))
{
LOGERROR("Failed to parse trailing JSON in chunk [%d, %d]!",
a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ
@@ -970,7 +976,7 @@ bool cWSSCompact::cPAKFile::SaveChunkToData(const cChunkCoords & a_Chunk, cWorld
// Compress the data:
AString CompressedData;
int errorcode = CompressString(Data.data(), Data.size(), CompressedData, m_CompressionFactor);
- if ( errorcode != Z_OK )
+ if (errorcode != Z_OK)
{
LOGERROR("Error %i compressing data for chunk [%d, %d, %d]", errorcode, a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
return false;
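
CompressString is the project's own zlib wrapper; the error path touched here simply checks its return value against Z_OK. A minimal sketch of the same check written directly against zlib (an illustration only, not the wrapper's actual implementation):

#include <cstdio>
#include <string>
#include <vector>
#include <zlib.h>

// Compress a_Data and treat anything other than Z_OK as a failure,
// mirroring the error handling in SaveChunkToData above.
static bool CompressToBuffer(const std::string & a_Data, std::vector<Bytef> & a_Out, int a_Factor)
{
	uLongf OutSize = compressBound(static_cast<uLong>(a_Data.size()));
	a_Out.resize(OutSize);
	int ErrorCode = compress2(
		a_Out.data(), &OutSize,
		reinterpret_cast<const Bytef *>(a_Data.data()), static_cast<uLong>(a_Data.size()),
		a_Factor
	);
	if (ErrorCode != Z_OK)
	{
		std::fprintf(stderr, "Error %d compressing data\n", ErrorCode);
		return false;
	}
	a_Out.resize(OutSize);  // shrink to the actual compressed size
	return true;
}

int main()
{
	std::vector<Bytef> Compressed;
	return CompressToBuffer("example chunk payload", Compressed, Z_BEST_COMPRESSION) ? 0 : 1;
}
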
diff --git a/src/WorldStorage/WorldStorage.cpp b/src/WorldStorage/WorldStorage.cpp
index 155aa6b14..707e8f929 100644
--- a/src/WorldStorage/WorldStorage.cpp
+++ b/src/WorldStorage/WorldStorage.cpp
@@ -61,7 +61,7 @@ cWorldStorage::~cWorldStorage()
-bool cWorldStorage::Start(cWorld * a_World, const AString & a_StorageSchemaName, int a_StorageCompressionFactor )
+bool cWorldStorage::Start(cWorld * a_World, const AString & a_StorageSchemaName, int a_StorageCompressionFactor)
{
m_World = a_World;
m_StorageSchemaName = a_StorageSchemaName;
@@ -163,7 +163,7 @@ void cWorldStorage::QueueSaveChunk(int a_ChunkX, int a_ChunkY, int a_ChunkZ)
void cWorldStorage::UnqueueLoad(int a_ChunkX, int a_ChunkY, int a_ChunkZ)
{
- m_LoadQueue.Remove(sChunkLoad(a_ChunkX, a_ChunkY, a_ChunkZ,true));
+ m_LoadQueue.Remove(sChunkLoad(a_ChunkX, a_ChunkY, a_ChunkZ, true));
}
@@ -182,8 +182,8 @@ void cWorldStorage::UnqueueSave(const cChunkCoords & a_Chunk)
void cWorldStorage::InitSchemas(int a_StorageCompressionFactor)
{
// The first schema added is considered the default
- m_Schemas.push_back(new cWSSAnvil (m_World,a_StorageCompressionFactor));
- m_Schemas.push_back(new cWSSCompact (m_World,a_StorageCompressionFactor));
+ m_Schemas.push_back(new cWSSAnvil (m_World, a_StorageCompressionFactor));
+ m_Schemas.push_back(new cWSSCompact (m_World, a_StorageCompressionFactor));
m_Schemas.push_back(new cWSSForgetful(m_World));
// Add new schemas here
diff --git a/src/WorldStorage/WorldStorage.h b/src/WorldStorage/WorldStorage.h
index bf764a539..2d5d9c830 100644
--- a/src/WorldStorage/WorldStorage.h
+++ b/src/WorldStorage/WorldStorage.h
@@ -101,15 +101,16 @@ protected:
}
} ;
- struct FuncTable {
+ struct FuncTable
+ {
static void Delete(sChunkLoad) {};
- static void Combine(sChunkLoad& a_orig, const sChunkLoad a_new)
+ static void Combine(sChunkLoad & a_orig, const sChunkLoad a_new)
{
a_orig.m_Generate |= a_new.m_Generate;
};
};
- typedef cQueue<sChunkLoad,FuncTable> sChunkLoadQueue;
+ typedef cQueue<sChunkLoad, FuncTable> sChunkLoadQueue;
cWorld * m_World;
AString m_StorageSchemaName;
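
The FuncTable reformatted above is what lets cQueue coalesce duplicate load requests: when the same chunk is queued twice, Combine ORs the new m_Generate flag into the entry that is already waiting instead of adding a second one. A stand-alone sketch of that merge-on-duplicate idea (cQueue itself is the project's own template; the container and stub types below are only illustrative):

#include <vector>

struct sChunkLoadStub
{
	int m_ChunkX, m_ChunkZ;
	bool m_Generate;

	bool operator == (const sChunkLoadStub & a_Other) const
	{
		return (m_ChunkX == a_Other.m_ChunkX) && (m_ChunkZ == a_Other.m_ChunkZ);
	}
};

struct cLoadQueueStub
{
	std::vector<sChunkLoadStub> m_Items;

	// Merge into an already-queued request for the same chunk (as FuncTable::Combine
	// does), otherwise append a new entry.
	void EnqueueItem(const sChunkLoadStub & a_New)
	{
		for (sChunkLoadStub & Item : m_Items)
		{
			if (Item == a_New)
			{
				Item.m_Generate |= a_New.m_Generate;
				return;
			}
		}
		m_Items.push_back(a_New);
	}
};

int main()
{
	cLoadQueueStub Queue;
	Queue.EnqueueItem({ 3, -5, false });
	Queue.EnqueueItem({ 3, -5, true });  // merged: still one entry, now with m_Generate set
	return ((Queue.m_Items.size() == 1) && Queue.m_Items[0].m_Generate) ? 0 : 1;
}
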