Merge branch 'master' into unreachable
commit 1635c696de
MCServer/Plugins
src
@@ -217,7 +217,7 @@ function TestBlockAreasString()
 		return
 	end
 	cFile:CreateFolder("schematics")
-	local f = io.open("schematics/StringTest.schematic", "w")
+	local f = io.open("schematics/StringTest.schematic", "wb")
 	f:write(Data)
 	f:close()
 
@@ -230,7 +230,7 @@ function TestBlockAreasString()
 	BA2:Clear()
 
 	-- Load another area from a string in that file:
-	f = io.open("schematics/StringTest.schematic", "r")
+	f = io.open("schematics/StringTest.schematic", "rb")
 	Data = f:read("*all")
 	if not(BA2:LoadFromSchematicString(Data)) then
 		LOG("Cannot load schematic from string")
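The "w"/"r" to "wb"/"rb" change above matters because Lua's io.open() hands its mode string to C's fopen(); in text mode on Windows the CRT translates 0x0A bytes to CR+LF on write (and back on read), which corrupts binary payloads such as schematic data. A minimal C++ sketch of the same pitfall (the file name is just an illustration, not from the plugin):

#include <cstdio>

int main()
{
	// Three bytes of binary data containing 0x0A ('\n'):
	const unsigned char payload[] = { 0x01, 0x0A, 0x02 };

	// "wb" writes exactly 3 bytes on every platform; plain "w" (text mode) would
	// expand the 0x0A to 0x0D 0x0A on Windows and corrupt the data.
	FILE * f = std::fopen("example.bin", "wb");
	if (f != nullptr)
	{
		std::fwrite(payload, 1, sizeof(payload), f);
		std::fclose(f);
	}
	return 0;
}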
@@ -131,9 +131,6 @@ void cLuaChunkStay::Enable(cChunkMap & a_ChunkMap, int a_OnChunkAvailableStackPo
 
 void cLuaChunkStay::OnChunkAvailable(int a_ChunkX, int a_ChunkZ)
 {
-	// DEBUG:
-	LOGD("LuaChunkStay: Chunk [%d, %d] is now available, calling the callback...", a_ChunkX, a_ChunkZ);
-
 	cPluginLua::cOperation Op(m_Plugin);
 	Op().Call((int)m_OnChunkAvailable, a_ChunkX, a_ChunkZ);
 }
@@ -1376,20 +1376,14 @@ void cChunkMap::ReplaceTreeBlocks(const sSetBlockVector & a_Blocks)
 					break;
 				}
 				case E_BLOCK_LEAVES:
+				case E_BLOCK_NEW_LEAVES:
 				{
-					if (itr->BlockType == E_BLOCK_LOG)
+					if ((itr->BlockType == E_BLOCK_LOG) || (itr->BlockType == E_BLOCK_NEW_LOG))
 					{
 						Chunk->SetBlock(itr->x, itr->y, itr->z, itr->BlockType, itr->BlockMeta);
 					}
 					break;
 				}
-				case E_BLOCK_NEW_LEAVES:
-				{
-					if (itr->BlockType == E_BLOCK_NEW_LOG)
-					{
-						Chunk->SetBlock(itr->x, itr->y, itr->z, itr->BlockType, itr->BlockMeta);
-					}
-				}
 			}
 		} // for itr - a_Blocks[]
 	}
@@ -73,12 +73,15 @@ int cGZipFile::ReadRestOfFile(AString & a_Contents)
 
 	// Since the gzip format doesn't really support getting the uncompressed length, we need to read incrementally. Yuck!
 	int NumBytesRead = 0;
+	int TotalBytes = 0;
 	char Buffer[64 KiB];
 	while ((NumBytesRead = gzread(m_File, Buffer, sizeof(Buffer))) > 0)
 	{
+		TotalBytes += NumBytesRead;
 		a_Contents.append(Buffer, NumBytesRead);
 	}
-	return NumBytesRead;
+	// NumBytesRead is < 0 on error
+	return (NumBytesRead >= 0) ? TotalBytes : NumBytesRead;
 }
 
 
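The ReadRestOfFile() fix above makes the function return the accumulated uncompressed size instead of the result of the last gzread() call, which is 0 at EOF. A standalone sketch of the same accumulation pattern against zlib's gz* API; the function name and file handling here are illustrative, not the project's actual code:

#include <string>
#include <zlib.h>

// Reads an entire .gz file into a_Contents; returns the total number of
// uncompressed bytes on success, or a negative value on error.
int ReadWholeGZipFile(const char * a_FileName, std::string & a_Contents)
{
	gzFile f = gzopen(a_FileName, "rb");
	if (f == nullptr)
	{
		return -1;
	}
	char Buffer[64 * 1024];
	int NumBytesRead = 0;
	int TotalBytes = 0;
	// gzread() returns the bytes decompressed this call, 0 at EOF, negative on error,
	// so the total has to be summed across iterations rather than taken from the last call:
	while ((NumBytesRead = gzread(f, Buffer, sizeof(Buffer))) > 0)
	{
		TotalBytes += NumBytesRead;
		a_Contents.append(Buffer, NumBytesRead);
	}
	gzclose(f);
	return (NumBytesRead >= 0) ? TotalBytes : NumBytesRead;
}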
@@ -20,8 +20,14 @@ void cFireworkItem::WriteToNBTCompound(const cFireworkItem & a_FireworkItem, cFa
 			a_Writer.AddByte("Flicker", a_FireworkItem.m_HasFlicker);
 			a_Writer.AddByte("Trail", a_FireworkItem.m_HasTrail);
 			a_Writer.AddByte("Type", a_FireworkItem.m_Type);
-			a_Writer.AddIntArray("Colors", &(a_FireworkItem.m_Colours[0]), a_FireworkItem.m_Colours.size());
-			a_Writer.AddIntArray("FadeColors", &(a_FireworkItem.m_FadeColours[0]), a_FireworkItem.m_FadeColours.size());
+			if (!a_FireworkItem.m_Colours.empty())
+			{
+				a_Writer.AddIntArray("Colors", &(a_FireworkItem.m_Colours[0]), a_FireworkItem.m_Colours.size());
+			}
+			if (!a_FireworkItem.m_FadeColours.empty())
+			{
+				a_Writer.AddIntArray("FadeColors", &(a_FireworkItem.m_FadeColours[0]), a_FireworkItem.m_FadeColours.size());
+			}
 			a_Writer.EndCompound();
 			a_Writer.EndList();
 			a_Writer.EndCompound();
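The guards added above avoid evaluating &m_Colours[0] on an empty std::vector, which is undefined behavior (and triggers an assert in MSVC debug builds). A small self-contained illustration of the pattern, using a hypothetical stand-in for the NBT writer:

#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for a_Writer.AddIntArray(); it just prints instead of writing NBT.
static void WriteIntArray(const char * a_Name, const int * a_Values, std::size_t a_Count)
{
	std::printf("%s:", a_Name);
	for (std::size_t i = 0; i < a_Count; ++i)
	{
		std::printf(" %d", a_Values[i]);
	}
	std::printf("\n");
}

int main()
{
	std::vector<int> Colours;         // may legitimately be empty for a plain firework
	Colours.push_back(0xff0000);      // comment this out to exercise the empty case

	// &Colours[0] is only valid when the vector is non-empty, hence the guard:
	if (!Colours.empty())
	{
		WriteIntArray("Colors", &(Colours[0]), Colours.size());
	}
	return 0;
}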
|
Loading…
x
Reference in New Issue
Block a user