
Code improvements.

This commit is contained in:
Howaner 2014-09-19 15:07:01 +02:00
parent 0baca1f1d8
commit 6aa331a4fa
4 changed files with 36 additions and 38 deletions

src/Protocol/ChunkDataSerializer.cpp

@@ -45,7 +45,7 @@ const AString & cChunkDataSerializer::Serialize(int a_Version, int a_ChunkX, int
 	{
 		case RELEASE_1_2_5: Serialize29(data); break;
 		case RELEASE_1_3_2: Serialize39(data); break;
-		case RELEASE_1_8_0: Serialize80(data, a_ChunkX, a_ChunkZ); break;
+		case RELEASE_1_8_0: Serialize47(data, a_ChunkX, a_ChunkZ); break;
 		// TODO: Other protocol versions may serialize the data differently; implement here
 		default:
@@ -181,43 +181,41 @@ void cChunkDataSerializer::Serialize39(AString & a_Data)
-void cChunkDataSerializer::Serialize80(AString & a_Data, int a_ChunkX, int a_ChunkZ)
+void cChunkDataSerializer::Serialize47(AString & a_Data, int a_ChunkX, int a_ChunkZ)
 {
-	// TODO: Do not copy data and then compress it; rather, compress partial blocks of data (zlib *can* stream)
-	// Blocktypes converter (1.8 included the meta into the blocktype):
-	unsigned char Blocks[cChunkDef::NumBlocks * 2];
-	size_t LastOffset = 0;
-	for (size_t Index = 0; Index < cChunkDef::NumBlocks; Index++)
-	{
-		BLOCKTYPE BlockType = m_BlockTypes[Index] & 0xFF;
-		NIBBLETYPE BlockMeta = m_BlockMetas[Index / 2] >> ((Index & 1) * 4) & 0x0f;
-		Blocks[LastOffset] = (unsigned char)(BlockType << 4) | BlockMeta;
-		Blocks[LastOffset + 1] = (unsigned char)(BlockType >> 4);
-		LastOffset += 2;
-	}
-	const int BiomeDataSize = cChunkDef::Width * cChunkDef::Width;
-	const int BlockLightOffset = sizeof(Blocks);
-	const int SkyLightOffset = BlockLightOffset + sizeof(m_BlockLight);
-	const int BiomeOffset = SkyLightOffset + sizeof(m_BlockSkyLight);
-	const int DataSize = BiomeOffset + BiomeDataSize;
-	// Temporary buffer for the composed data:
-	char AllData [DataSize];
-	memcpy(AllData, Blocks, sizeof(Blocks));
-	memcpy(AllData + BlockLightOffset, m_BlockLight, sizeof(m_BlockLight));
-	memcpy(AllData + SkyLightOffset, m_BlockSkyLight, sizeof(m_BlockSkyLight));
-	memcpy(AllData + BiomeOffset, m_BiomeData, BiomeDataSize);
 	// This function returns the fully compressed packet (including packet size), not the raw packet!
 	// Create the packet:
 	cByteBuffer Packet(512 KiB);
 	Packet.WriteVarInt(0x21);  // Packet id (Chunk Data packet)
 	Packet.WriteBEInt(a_ChunkX);
 	Packet.WriteBEInt(a_ChunkZ);
 	Packet.WriteBool(true);  // "Ground-up continuous", or rather, "biome data present" flag
 	Packet.WriteBEShort((short) 0xffff);  // We're always sending the full chunk with no additional data, so the bitmap is 0xffff
-	Packet.WriteVarInt(DataSize);  // Chunk size
-	Packet.WriteBuf(AllData, DataSize);  // Chunk data
+	// Write the chunk size:
+	const int BiomeDataSize = cChunkDef::Width * cChunkDef::Width;
+	UInt32 ChunkSize = (
+		(cChunkDef::NumBlocks * 2) +  // Block meta + type
+		sizeof(m_BlockLight) +        // Block light
+		sizeof(m_BlockSkyLight) +     // Block sky light
+		BiomeDataSize                 // Biome data
+	);
+	Packet.WriteVarInt(ChunkSize);
+	// Write the block types to the packet:
+	for (size_t Index = 0; Index < cChunkDef::NumBlocks; Index++)
+	{
+		BLOCKTYPE BlockType = m_BlockTypes[Index] & 0xFF;
+		NIBBLETYPE BlockMeta = m_BlockMetas[Index / 2] >> ((Index & 1) * 4) & 0x0f;
+		Packet.WriteByte((unsigned char)(BlockType << 4) | BlockMeta);
+		Packet.WriteByte((unsigned char)(BlockType >> 4));
+	}
+	// Write the rest:
+	Packet.WriteBuf(m_BlockLight, sizeof(m_BlockLight));
+	Packet.WriteBuf(m_BlockSkyLight, sizeof(m_BlockSkyLight));
+	Packet.WriteBuf(m_BiomeData, BiomeDataSize);
 	AString PacketData;
 	Packet.ReadAll(PacketData);
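For reference, the two WriteByte() calls per block above implement the 1.8 block format, in which block type and meta share one little-endian 16-bit value, (type << 4) | meta. A minimal standalone sketch of that packing (the PackBlock helper and the wool example are illustrative only, not part of this commit):

#include <cstdint>
#include <cstdio>

// Pack a 1.8-style block id the same way Serialize47 writes it:
// low byte = (type << 4) | meta, high byte = type >> 4,
// i.e. the pair is the little-endian 16-bit value (type << 4) | meta.
static void PackBlock(uint8_t a_Type, uint8_t a_Meta, uint8_t a_Out[2])
{
	a_Out[0] = static_cast<uint8_t>((a_Type << 4) | (a_Meta & 0x0f));
	a_Out[1] = static_cast<uint8_t>(a_Type >> 4);
}

int main()
{
	uint8_t Packed[2];
	PackBlock(35, 14, Packed);  // e.g. block type 35 (wool) with meta 14
	std::printf("0x%02x 0x%02x\n", Packed[0], Packed[1]);  // prints: 0x3e 0x02
	return 0;
}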
@@ -236,13 +234,13 @@ void cChunkDataSerializer::Serialize80(AString & a_Data, int a_ChunkX, int a_Chu
 	else
 	{
 		AString PostData;
-		Buffer.WriteVarInt(Packet.GetUsedSpace() + 1);
+		Buffer.WriteVarInt((UInt32)Packet.GetUsedSpace() + 1);
 		Buffer.WriteVarInt(0);
 		Buffer.ReadAll(PostData);
 		Buffer.CommitRead();
 		a_Data.clear();
-		a_Data.resize(PostData.size() + PacketData.size());
+		a_Data.reserve(PostData.size() + PacketData.size());
 		a_Data.append(PostData.data(), PostData.size());
 		a_Data.append(PacketData.data(), PacketData.size());
 	}
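The resize() to reserve() change above is a correctness fix: after clear(), resize(n) fills the string with n zero bytes, so the following append() calls would have produced a buffer of twice the intended size with a zeroed prefix, whereas reserve(n) only preallocates capacity. A small sketch of the difference, assuming AString behaves like std::string (illustrative, not from the commit):

#include <cassert>
#include <string>

int main()
{
	std::string Payload = "packet-bytes";

	std::string Wrong;
	Wrong.resize(Payload.size());                 // fills the string with '\0' bytes
	Wrong.append(Payload.data(), Payload.size());
	assert(Wrong.size() == 2 * Payload.size());   // zero prefix + payload

	std::string Right;
	Right.reserve(Payload.size());                // capacity only, size stays 0
	Right.append(Payload.data(), Payload.size());
	assert(Right.size() == Payload.size());       // exactly the payload
	return 0;
}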

src/Protocol/ChunkDataSerializer.h

@@ -24,7 +24,7 @@ protected:
 	void Serialize29(AString & a_Data);  // Release 1.2.4 and 1.2.5
 	void Serialize39(AString & a_Data);  // Release 1.3.1 to 1.7.10
-	void Serialize80(AString & a_Data, int a_ChunkX, int a_ChunkZ);  // Release 1.8
+	void Serialize47(AString & a_Data, int a_ChunkX, int a_ChunkZ);  // Release 1.8
 public:
 	enum

src/Protocol/Protocol18x.cpp

@@ -75,6 +75,7 @@ Implements the 1.8.x protocol classes:
 const int MAX_ENC_LEN = 512;  // Maximum size of the encrypted message; should be 128, but who knows...
+const uLongf MAX_COMPRESSED_PACKET_LEN = 200 KiB;  // Maximum size of compressed packets.
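The "200 KiB" spelling compiles because of the project-wide size-suffix macro; assuming it is defined as a plain postfix multiplier (an assumption about Globals.h, not shown in this diff), the constant works out as follows:

// Assumed definition, for illustration only: "200 KiB" expands to "200 * 1024".
#define KiB * 1024

const unsigned long MAX_COMPRESSED_PACKET_LEN_EXAMPLE = 200 KiB;  // == 204800 bytes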
@@ -1513,11 +1514,10 @@ void cProtocol180::SendWindowProperty(const cWindow & a_Window, int a_Property,
 bool cProtocol180::CompressPacket(const AString & a_Packet, AString & a_CompressedData)
 {
 	// Compress the data:
-	const uLongf CompressedMaxSize = 200000;
-	char CompressedData[CompressedMaxSize];
+	char CompressedData[MAX_COMPRESSED_PACKET_LEN];
 	uLongf CompressedSize = compressBound(a_Packet.size());
-	if (CompressedSize >= CompressedMaxSize)
+	if (CompressedSize >= MAX_COMPRESSED_PACKET_LEN)
 	{
 		ASSERT(!"Too high packet size.");
 		return false;
@@ -1541,7 +1541,7 @@ bool cProtocol180::CompressPacket(const AString & a_Packet, AString & a_Compress
 	Buffer.CommitRead();
 	a_CompressedData.clear();
-	a_CompressedData.resize(LengthData.size() + CompressedSize);
+	a_CompressedData.reserve(LengthData.size() + CompressedSize);
 	a_CompressedData.append(LengthData.data(), LengthData.size());
 	a_CompressedData.append(CompressedData, CompressedSize);
 	return true;
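For context, a rough sketch of the zlib pattern CompressPacket() is built on: compressBound() yields the worst-case output size (checked here against MAX_COMPRESSED_PACKET_LEN), and compress2() then fills a fixed buffer and reports the actual compressed length. Only the zlib calls are taken as given; the helper name and sizes are illustrative, not the commit's code:

#include <zlib.h>
#include <string>

// Illustrative sketch: compress a packet body into a fixed buffer,
// rejecting inputs whose worst-case compressed size would not fit.
static bool CompressIntoBuffer(const std::string & a_Packet, std::string & a_Out)
{
	const uLongf MaxLen = 200 * 1024;    // mirrors MAX_COMPRESSED_PACKET_LEN
	static char Compressed[200 * 1024];  // static just to keep the buffer off the stack in this sketch
	uLongf CompressedSize = compressBound(static_cast<uLong>(a_Packet.size()));
	if (CompressedSize >= MaxLen)
	{
		return false;  // even the worst case must fit the buffer
	}
	int Status = compress2(
		reinterpret_cast<Bytef *>(Compressed), &CompressedSize,
		reinterpret_cast<const Bytef *>(a_Packet.data()),
		static_cast<uLong>(a_Packet.size()), Z_DEFAULT_COMPRESSION
	);
	if (Status != Z_OK)
	{
		return false;
	}
	a_Out.assign(Compressed, CompressedSize);  // CompressedSize now holds the real length
	return true;
}

int main()
{
	std::string Compressed;
	return CompressIntoBuffer(std::string(1000, 'a'), Compressed) ? 0 : 1;
}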

src/Protocol/ProtocolRecognizer.cpp

@@ -955,7 +955,7 @@ bool cProtocolRecognizer::TryRecognizeLengthlessProtocol(void)
 			m_Protocol = new cProtocol132(m_Client);
 			return true;
 		}
-		//case PROTO_VERSION_1_4_2:
+		case PROTO_VERSION_1_4_2:
 		case PROTO_VERSION_1_4_4:
 		{
 			m_Protocol = new cProtocol142(m_Client);