Mirror of https://github.com/luanti-org/luanti.git, synced 2025-08-11 17:51:04 +00:00
Migrate to STL containers/algorithms.
This commit is contained in: commit 6a1670dbc3 (parent e204bedf1d)
63 changed files with 1330 additions and 1417 deletions
src/server.cpp: 394 changed lines (only this file is shown below)
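
The pattern of the migration is mechanical and repeats throughout the diff below: Irrlicht's core::array, core::list and core::map become std::vector, std::list and std::map (or std::set where the bool value of a core::map was never used), node-pointer lookups become iterator comparisons against end(), remove() becomes erase(), and insert(key, value) becomes operator[] assignment. A minimal compilable sketch of the recurring before/after idioms (the Irrlicht originals appear in comments; the variable names are illustrative, not from the commit):

#include <iostream>
#include <map>
#include <set>

int main()
{
	// core::array<T>     -> std::vector<T>
	// core::list<T>      -> std::list<T>
	// core::map<K, V>    -> std::map<K, V>
	// core::map<K, bool> -> std::set<K>  (the bool was a placeholder)
	std::map<int, float> sending;
	std::set<int> sent;

	// core::map::insert(k, v) -> operator[] assignment
	sending[42] = 0.0f;

	// find(k) != NULL (node pointer) -> find(k) != end() (iterator)
	if(sending.find(42) != sending.end())
		sending.erase(42);              // remove(k) -> erase(k)

	sent.insert(42);                    // insert(k, true) -> insert(k)

	// Iterator/atEnd()/getNode()->getKey() -> begin()/end() and *it
	for(std::set<int>::iterator it = sent.begin(); it != sent.end(); ++it)
		std::cout << *it << '\n';
	return 0;
}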
@@ -126,7 +126,7 @@ v3f ServerSoundParams::getPos(ServerEnvironment *env, bool *pos_exists) const
 }

 void RemoteClient::GetNextBlocks(Server *server, float dtime,
-		core::array<PrioritySortedBlockTransfer> &dest)
+		std::vector<PrioritySortedBlockTransfer> &dest)
 {
 	DSTACK(__FUNCTION_NAME);

@@ -274,11 +274,11 @@ void RemoteClient::GetNextBlocks(Server *server, float dtime,
 			Get the border/face dot coordinates of a "d-radiused"
 			box
 		*/
-		core::list<v3s16> list;
+		std::list<v3s16> list;
 		getFacePositions(list, d);

-		core::list<v3s16>::Iterator li;
-		for(li=list.begin(); li!=list.end(); li++)
+		std::list<v3s16>::iterator li;
+		for(li=list.begin(); li!=list.end(); ++li)
 		{
 			v3s16 p = *li + center;

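Note that alongside the container swap, the loop increments change from li++ to ++li. A hedged micro-example of why the pre-increment form is preferred for iterators:

#include <list>

int sum(const std::list<int> &xs)
{
	int total = 0;
	// Pre-increment (++it) advances in place; post-increment (it++) must
	// also construct and return a copy of the iterator, which is why the
	// commit rewrites li++ as ++li throughout.
	for(std::list<int>::const_iterator it = xs.begin(); it != xs.end(); ++it)
		total += *it;
	return total;
}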
@@ -305,7 +305,7 @@ void RemoteClient::GetNextBlocks(Server *server, float dtime,
 			}

 			// Don't send blocks that are currently being transferred
-			if(m_blocks_sending.find(p) != NULL)
+			if(m_blocks_sending.find(p) != m_blocks_sending.end())
 				continue;

 			/*
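This hunk captures the most common idiom change in the commit: Irrlicht's core::map::find() returns a pointer to the internal tree node (NULL when the key is absent), whereas std::map::find() returns an iterator, so the absence sentinel changes from NULL to end(). A small sketch of the membership test (int stands in for v3s16; the wrapper function is illustrative, not part of the commit):

#include <map>

bool is_sending(const std::map<int, float> &blocks_sending, int p)
{
	// Irrlicht: blocks_sending.find(p) != NULL   (find() returned a Node*)
	// STL:      find() returns an iterator, so compare against end()
	return blocks_sending.find(p) != blocks_sending.end();
}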
@@ -382,7 +382,7 @@ void RemoteClient::GetNextBlocks(Server *server, float dtime,
 				Don't send already sent blocks
 			*/
 			{
-				if(m_blocks_sent.find(p) != NULL)
+				if(m_blocks_sent.find(p) != m_blocks_sent.end())
 				{
 					continue;
 				}
@@ -554,21 +554,21 @@ queue_full_break:

 void RemoteClient::GotBlock(v3s16 p)
 {
-	if(m_blocks_sending.find(p) != NULL)
-		m_blocks_sending.remove(p);
+	if(m_blocks_sending.find(p) != m_blocks_sending.end())
+		m_blocks_sending.erase(p);
 	else
 	{
 		/*infostream<<"RemoteClient::GotBlock(): Didn't find in"
 				" m_blocks_sending"<<std::endl;*/
 		m_excess_gotblocks++;
 	}
-	m_blocks_sent.insert(p, true);
+	m_blocks_sent.insert(p);
 }

 void RemoteClient::SentBlock(v3s16 p)
 {
-	if(m_blocks_sending.find(p) == NULL)
-		m_blocks_sending.insert(p, 0.0);
+	if(m_blocks_sending.find(p) == m_blocks_sending.end())
+		m_blocks_sending[p] = 0.0;
 	else
 		infostream<<"RemoteClient::SentBlock(): Sent block"
 				" already in m_blocks_sending"<<std::endl;
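GotBlock() and SentBlock() above also show the insertion idioms: m_blocks_sent, previously a core::map with a dummy bool value, becomes a set whose insert() takes only the key, while m_blocks_sending keeps its float value and is populated through operator[]. A compilable sketch of the same bookkeeping under that assumption (ints stand in for v3s16):

#include <map>
#include <set>

void got_block(std::map<int, float> &sending, std::set<int> &sent, int p)
{
	if(sending.find(p) != sending.end())
		sending.erase(p);   // core::map::remove() -> std::map::erase()
	sent.insert(p);         // insert(p, true)     -> insert(p)
}

void sent_block(std::map<int, float> &sending, int p)
{
	if(sending.find(p) == sending.end())
		sending[p] = 0.0f;  // insert(p, 0.0)      -> operator[]
}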
@@ -578,26 +578,26 @@ void RemoteClient::SetBlockNotSent(v3s16 p)
 {
 	m_nearest_unsent_d = 0;

-	if(m_blocks_sending.find(p) != NULL)
-		m_blocks_sending.remove(p);
-	if(m_blocks_sent.find(p) != NULL)
-		m_blocks_sent.remove(p);
+	if(m_blocks_sending.find(p) != m_blocks_sending.end())
+		m_blocks_sending.erase(p);
+	if(m_blocks_sent.find(p) != m_blocks_sent.end())
+		m_blocks_sent.erase(p);
 }

-void RemoteClient::SetBlocksNotSent(core::map<v3s16, MapBlock*> &blocks)
+void RemoteClient::SetBlocksNotSent(std::map<v3s16, MapBlock*> &blocks)
 {
 	m_nearest_unsent_d = 0;

-	for(core::map<v3s16, MapBlock*>::Iterator
-			i = blocks.getIterator();
-			i.atEnd()==false; i++)
+	for(std::map<v3s16, MapBlock*>::iterator
+			i = blocks.begin();
+			i != blocks.end(); ++i)
 	{
-		v3s16 p = i.getNode()->getKey();
+		v3s16 p = i->first;

-		if(m_blocks_sending.find(p) != NULL)
-			m_blocks_sending.remove(p);
-		if(m_blocks_sent.find(p) != NULL)
-			m_blocks_sent.remove(p);
+		if(m_blocks_sending.find(p) != m_blocks_sending.end())
+			m_blocks_sending.erase(p);
+		if(m_blocks_sent.find(p) != m_blocks_sent.end())
+			m_blocks_sent.erase(p);
 	}
 }

@@ -854,13 +854,13 @@ Server::~Server()
 		/*
 			Send the message to clients
 		*/
-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
 			// Get client and check that it is valid
-			RemoteClient *client = i.getNode()->getValue();
-			assert(client->peer_id == i.getNode()->getKey());
+			RemoteClient *client = i->second;
+			assert(client->peer_id == i->first);
 			if(client->serialization_version == SER_FMT_VER_INVALID)
 				continue;

@@ -909,13 +909,13 @@ Server::~Server()
 	{
 		JMutexAutoLock clientslock(m_con_mutex);

-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{

 			// Delete client
-			delete i.getNode()->getValue();
+			delete i->second;
 		}
 	}

@@ -1073,11 +1073,11 @@ void Server::AsyncRunStep()
 		//JMutexAutoLock envlock(m_env_mutex);
 		JMutexAutoLock conlock(m_con_mutex);

-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
+			RemoteClient *client = i->second;
 			SharedBuffer<u8> data = makePacket_TOCLIENT_TIME_OF_DAY(
 					m_env->getTimeOfDay(), g_settings->getFloat("time_speed"));
 			// Send as reliable
@@ -1117,11 +1117,11 @@ void Server::AsyncRunStep()

 		ScopeProfiler sp(g_profiler, "Server: handle players");

-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
+			RemoteClient *client = i->second;
 			PlayerSAO *playersao = getPlayerSAO(client->peer_id);
 			if(playersao == NULL)
 				continue;
@@ -1161,7 +1161,7 @@ void Server::AsyncRunStep()

 		ScopeProfiler sp(g_profiler, "Server: liquid transform");

-		core::map<v3s16, MapBlock*> modified_blocks;
+		std::map<v3s16, MapBlock*> modified_blocks;
 		m_env->getMap().transformLiquids(modified_blocks);
 #if 0
 		/*
@@ -1186,11 +1186,11 @@ void Server::AsyncRunStep()

 			JMutexAutoLock lock2(m_con_mutex);

-			for(core::map<u16, RemoteClient*>::Iterator
-				i = m_clients.getIterator();
-				i.atEnd() == false; i++)
+			for(std::map<u16, RemoteClient*>::iterator
+				i = m_clients.begin();
+				i != m_clients.end(); ++i)
 			{
-				RemoteClient *client = i.getNode()->getValue();
+				RemoteClient *client = i->second;

 				if(modified_blocks.size() > 0)
 				{
@@ -1212,12 +1212,12 @@ void Server::AsyncRunStep()
 			m_clients_number = 0;
 			if(m_clients.size() != 0)
 				infostream<<"Players:"<<std::endl;
-			for(core::map<u16, RemoteClient*>::Iterator
-				i = m_clients.getIterator();
-				i.atEnd() == false; i++)
+			for(std::map<u16, RemoteClient*>::iterator
+				i = m_clients.begin();
+				i != m_clients.end(); ++i)
 			{
 				//u16 peer_id = i.getNode()->getKey();
-				RemoteClient *client = i.getNode()->getValue();
+				RemoteClient *client = i->second;
 				Player *player = m_env->getPlayer(client->peer_id);
 				if(player==NULL)
 					continue;
@@ -1259,11 +1259,11 @@ void Server::AsyncRunStep()
 		s16 radius = g_settings->getS16("active_object_send_range_blocks");
 		radius *= MAP_BLOCKSIZE;

-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
+			RemoteClient *client = i->second;

 			// If definitions and textures have not been sent, don't
 			// send objects either
@@ -1281,8 +1281,8 @@ void Server::AsyncRunStep()
 			}
 			v3s16 pos = floatToInt(player->getPosition(), BS);

-			core::map<u16, bool> removed_objects;
-			core::map<u16, bool> added_objects;
+			std::set<u16> removed_objects;
+			std::set<u16> added_objects;
 			m_env->getRemovedActiveObjects(pos, radius,
 					client->m_known_objects, removed_objects);
 			m_env->getAddedActiveObjects(pos, radius,
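Here core::map<u16, bool> is replaced by std::set<u16>: the bool payload was never read, so the map was really a set. A short sketch of the resulting access pattern (unsigned short stands in for the engine's u16; the collect() helper is hypothetical):

#include <set>

void collect(std::set<unsigned short> &removed_objects, unsigned short id)
{
	removed_objects.insert(id);  // was insert(id, false)
	// Reading keys back: *it replaces it.getNode()->getKey()
	for(std::set<unsigned short>::iterator it = removed_objects.begin();
			it != removed_objects.end(); ++it)
	{
		unsigned short current = *it;
		(void)current;  // placeholder for real work
	}
}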
@@ -1302,20 +1302,20 @@ void Server::AsyncRunStep()
 			// Handle removed objects
 			writeU16((u8*)buf, removed_objects.size());
 			data_buffer.append(buf, 2);
-			for(core::map<u16, bool>::Iterator
-				i = removed_objects.getIterator();
-				i.atEnd()==false; i++)
+			for(std::set<u16>::iterator
+				i = removed_objects.begin();
+				i != removed_objects.end(); ++i)
 			{
 				// Get object
-				u16 id = i.getNode()->getKey();
+				u16 id = *i;
 				ServerActiveObject* obj = m_env->getActiveObject(id);

 				// Add to data buffer for sending
-				writeU16((u8*)buf, i.getNode()->getKey());
+				writeU16((u8*)buf, id);
 				data_buffer.append(buf, 2);

 				// Remove from known objects
-				client->m_known_objects.remove(i.getNode()->getKey());
+				client->m_known_objects.erase(id);

 				if(obj && obj->m_known_by_count > 0)
 					obj->m_known_by_count--;
@@ -1324,12 +1324,12 @@ void Server::AsyncRunStep()
 			// Handle added objects
 			writeU16((u8*)buf, added_objects.size());
 			data_buffer.append(buf, 2);
-			for(core::map<u16, bool>::Iterator
-				i = added_objects.getIterator();
-				i.atEnd()==false; i++)
+			for(std::set<u16>::iterator
+				i = added_objects.begin();
+				i != added_objects.end(); ++i)
 			{
 				// Get object
-				u16 id = i.getNode()->getKey();
+				u16 id = *i;
 				ServerActiveObject* obj = m_env->getActiveObject(id);

 				// Get object type
@@ -1353,7 +1353,7 @@ void Server::AsyncRunStep()
 					data_buffer.append(serializeLongString(""));

 				// Add to known objects
-				client->m_known_objects.insert(i.getNode()->getKey(), false);
+				client->m_known_objects.insert(id);

 				if(obj)
 					obj->m_known_by_count++;
@@ -1412,7 +1412,7 @@ void Server::AsyncRunStep()

 		// Key = object id
 		// Value = data sent by object
-		core::map<u16, core::list<ActiveObjectMessage>* > buffered_messages;
+		std::map<u16, std::list<ActiveObjectMessage>* > buffered_messages;

 		// Get active object messages from environment
 		for(;;)
@@ -1421,43 +1421,43 @@ void Server::AsyncRunStep()
 			if(aom.id == 0)
 				break;

-			core::list<ActiveObjectMessage>* message_list = NULL;
-			core::map<u16, core::list<ActiveObjectMessage>* >::Node *n;
+			std::list<ActiveObjectMessage>* message_list = NULL;
+			std::map<u16, std::list<ActiveObjectMessage>* >::iterator n;
 			n = buffered_messages.find(aom.id);
-			if(n == NULL)
+			if(n == buffered_messages.end())
 			{
-				message_list = new core::list<ActiveObjectMessage>;
-				buffered_messages.insert(aom.id, message_list);
+				message_list = new std::list<ActiveObjectMessage>;
+				buffered_messages[aom.id] = message_list;
 			}
 			else
 			{
-				message_list = n->getValue();
+				message_list = n->second;
 			}
 			message_list->push_back(aom);
 		}

 		// Route data to every client
-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd()==false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
+			RemoteClient *client = i->second;
 			std::string reliable_data;
 			std::string unreliable_data;
 			// Go through all objects in message buffer
-			for(core::map<u16, core::list<ActiveObjectMessage>* >::Iterator
-				j = buffered_messages.getIterator();
-				j.atEnd()==false; j++)
+			for(std::map<u16, std::list<ActiveObjectMessage>* >::iterator
+				j = buffered_messages.begin();
+				j != buffered_messages.end(); ++j)
 			{
 				// If object is not known by client, skip it
-				u16 id = j.getNode()->getKey();
-				if(client->m_known_objects.find(id) == NULL)
+				u16 id = j->first;
+				if(client->m_known_objects.find(id) == client->m_known_objects.end())
 					continue;
 				// Get message list of object
-				core::list<ActiveObjectMessage>* list = j.getNode()->getValue();
+				std::list<ActiveObjectMessage>* list = j->second;
 				// Go through every message
-				for(core::list<ActiveObjectMessage>::Iterator
-					k = list->begin(); k != list->end(); k++)
+				for(std::list<ActiveObjectMessage>::iterator
+					k = list->begin(); k != list->end(); ++k)
 				{
 					// Compose the full new data with header
 					ActiveObjectMessage aom = *k;
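The buffered_messages map stores raw pointers to heap-allocated lists, so after the migration the container still does not own its values; the delete loop in the next hunk remains mandatory. A condensed, self-contained sketch of the lookup-or-create pattern and its cleanup (Message is a stand-in for ActiveObjectMessage):

#include <list>
#include <map>

struct Message { int id; };

void buffer_message(std::map<int, std::list<Message>*> &buffered, Message aom)
{
	std::list<Message> *message_list;
	std::map<int, std::list<Message>*>::iterator n = buffered.find(aom.id);
	if(n == buffered.end()) {
		message_list = new std::list<Message>;
		buffered[aom.id] = message_list;   // was insert(aom.id, list)
	} else {
		message_list = n->second;          // was n->getValue()
	}
	message_list->push_back(aom);
}

void clear_buffered(std::map<int, std::list<Message>*> &buffered)
{
	// The map holds raw owning pointers, so each value must be freed.
	for(std::map<int, std::list<Message>*>::iterator i = buffered.begin();
			i != buffered.end(); ++i)
		delete i->second;
	buffered.clear();
}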
@@ -1508,11 +1508,11 @@ void Server::AsyncRunStep()
 		}

 		// Clear buffered_messages
-		for(core::map<u16, core::list<ActiveObjectMessage>* >::Iterator
-			i = buffered_messages.getIterator();
-			i.atEnd()==false; i++)
+		for(std::map<u16, std::list<ActiveObjectMessage>* >::iterator
+			i = buffered_messages.begin();
+			i != buffered_messages.end(); ++i)
 		{
-			delete i.getNode()->getValue();
+			delete i->second;
 		}
 	}

@@ -1546,7 +1546,7 @@ void Server::AsyncRunStep()
 			// Players far away from the change are stored here.
 			// Instead of sending the changes, MapBlocks are set not sent
 			// for them.
-			core::list<u16> far_players;
+			std::list<u16> far_players;

 			if(event->type == MEET_ADDNODE)
 			{
@@ -1580,12 +1580,11 @@ void Server::AsyncRunStep()
 			{
 				infostream<<"Server: MEET_OTHER"<<std::endl;
 				prof.add("MEET_OTHER", 1);
-				for(core::map<v3s16, bool>::Iterator
-					i = event->modified_blocks.getIterator();
-					i.atEnd()==false; i++)
+				for(std::set<v3s16>::iterator
+					i = event->modified_blocks.begin();
+					i != event->modified_blocks.end(); ++i)
 				{
-					v3s16 p = i.getNode()->getKey();
-					setBlockNotSent(p);
+					setBlockNotSent(*i);
 				}
 			}
 			else
@@ -1601,19 +1600,18 @@ void Server::AsyncRunStep()
 			if(far_players.size() > 0)
 			{
 				// Convert list format to that wanted by SetBlocksNotSent
-				core::map<v3s16, MapBlock*> modified_blocks2;
-				for(core::map<v3s16, bool>::Iterator
-					i = event->modified_blocks.getIterator();
-					i.atEnd()==false; i++)
+				std::map<v3s16, MapBlock*> modified_blocks2;
+				for(std::set<v3s16>::iterator
+					i = event->modified_blocks.begin();
+					i != event->modified_blocks.end(); ++i)
 				{
-					v3s16 p = i.getNode()->getKey();
-					modified_blocks2.insert(p,
-							m_env->getMap().getBlockNoCreateNoEx(p));
+					modified_blocks2[*i] =
+							m_env->getMap().getBlockNoCreateNoEx(*i);
 				}
 				// Set blocks not sent
-				for(core::list<u16>::Iterator
+				for(std::list<u16>::iterator
 					i = far_players.begin();
-					i != far_players.end(); i++)
+					i != far_players.end(); ++i)
 				{
 					u16 peer_id = *i;
 					RemoteClient *client = getClient(peer_id);
@@ -1792,7 +1790,7 @@ void Server::ProcessData(u8 *data, u32 datasize, u16 peer_id)
 		u8 client_max = data[2];
 		u8 our_max = SER_FMT_VER_HIGHEST;
 		// Use the highest version supported by both
-		u8 deployed = core::min_(client_max, our_max);
+		u8 deployed = std::min(client_max, our_max);
 		// If it's lower than the lowest supported, give up.
 		if(deployed < SER_FMT_VER_LOWEST)
 			deployed = SER_FMT_VER_INVALID;
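Beyond containers, the commit also drops Irrlicht's arithmetic helpers: core::min_ gives way to std::min from <algorithm>, which is a drop-in replacement as long as both arguments share one type, as they do here (both u8). A trivial sketch:

#include <algorithm>

unsigned char pick_version(unsigned char client_max, unsigned char our_max)
{
	// std::min deduces a single template type from its arguments, so
	// mixed-type calls would need an explicit std::min<T>(...).
	return std::min(client_max, our_max);
}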
@@ -2143,12 +2141,12 @@ void Server::ProcessData(u8 *data, u32 datasize, u16 peer_id)
 		*/
 		{
 			std::ostringstream os(std::ios_base::binary);
-			for(core::map<u16, RemoteClient*>::Iterator
-				i = m_clients.getIterator();
-				i.atEnd() == false; i++)
+			for(std::map<u16, RemoteClient*>::iterator
+				i = m_clients.begin();
+				i != m_clients.end(); ++i)
 			{
-				RemoteClient *client = i.getNode()->getValue();
-				assert(client->peer_id == i.getNode()->getKey());
+				RemoteClient *client = i->second;
+				assert(client->peer_id == i->first);
 				if(client->serialization_version == SER_FMT_VER_INVALID)
 					continue;
 				// Get player
@@ -2520,13 +2518,13 @@ void Server::ProcessData(u8 *data, u32 datasize, u16 peer_id)
 		/*
 			Send the message to clients
 		*/
-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
 			// Get client and check that it is valid
-			RemoteClient *client = i.getNode()->getValue();
-			assert(client->peer_id == i.getNode()->getKey());
+			RemoteClient *client = i->second;
+			assert(client->peer_id == i->first);
 			if(client->serialization_version == SER_FMT_VER_INVALID)
 				continue;

@@ -2651,7 +2649,7 @@ void Server::ProcessData(u8 *data, u32 datasize, u16 peer_id)
 		std::string datastring((char*)&data[2], datasize-2);
 		std::istringstream is(datastring, std::ios_base::binary);

-		core::list<MediaRequest> tosend;
+		std::list<MediaRequest> tosend;
 		u16 numfiles = readU16(is);

 		infostream<<"Sending "<<numfiles<<" files to "
@@ -3189,19 +3187,19 @@ void Server::setInventoryModified(const InventoryLocation &loc)
 	}
 }

-core::list<PlayerInfo> Server::getPlayerInfo()
+std::list<PlayerInfo> Server::getPlayerInfo()
 {
 	DSTACK(__FUNCTION_NAME);
 	JMutexAutoLock envlock(m_env_mutex);
 	JMutexAutoLock conlock(m_con_mutex);

-	core::list<PlayerInfo> list;
+	std::list<PlayerInfo> list;

-	core::list<Player*> players = m_env->getPlayers();
+	std::list<Player*> players = m_env->getPlayers();

-	core::list<Player*>::Iterator i;
+	std::list<Player*>::iterator i;
 	for(i = players.begin();
-			i != players.end(); i++)
+			i != players.end(); ++i)
 	{
 		PlayerInfo info;

@@ -3470,13 +3468,13 @@ void Server::SendShowFormspecMessage(u16 peer_id, const std::string formspec, co

 void Server::BroadcastChatMessage(const std::wstring &message)
 {
-	for(core::map<u16, RemoteClient*>::Iterator
-		i = m_clients.getIterator();
-		i.atEnd() == false; i++)
+	for(std::map<u16, RemoteClient*>::iterator
+		i = m_clients.begin();
+		i != m_clients.end(); ++i)
 	{
 		// Get client and check that it is valid
-		RemoteClient *client = i.getNode()->getValue();
-		assert(client->peer_id == i.getNode()->getKey());
+		RemoteClient *client = i->second;
+		assert(client->peer_id == i->first);
 		if(client->serialization_version == SER_FMT_VER_INVALID)
 			continue;

@@ -3595,10 +3593,10 @@ s32 Server::playSound(const SimpleSoundSpec &spec,
 	}
 	else
 	{
-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator(); i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin(); i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
+			RemoteClient *client = i->second;
 			Player *player = m_env->getPlayer(client->peer_id);
 			if(!player)
 				continue;
@@ -3668,7 +3666,7 @@ void Server::stopSound(s32 handle)
 }

 void Server::sendRemoveNode(v3s16 p, u16 ignore_id,
-		core::list<u16> *far_players, float far_d_nodes)
+		std::list<u16> *far_players, float far_d_nodes)
 {
 	float maxd = far_d_nodes*BS;
 	v3f p_f = intToFloat(p, BS);
@@ -3681,13 +3679,13 @@ void Server::sendRemoveNode(v3s16 p, u16 ignore_id,
 	writeS16(&reply[4], p.Y);
 	writeS16(&reply[6], p.Z);

-	for(core::map<u16, RemoteClient*>::Iterator
-		i = m_clients.getIterator();
-		i.atEnd() == false; i++)
+	for(std::map<u16, RemoteClient*>::iterator
+		i = m_clients.begin();
+		i != m_clients.end(); ++i)
 	{
 		// Get client and check that it is valid
-		RemoteClient *client = i.getNode()->getValue();
-		assert(client->peer_id == i.getNode()->getKey());
+		RemoteClient *client = i->second;
+		assert(client->peer_id == i->first);
 		if(client->serialization_version == SER_FMT_VER_INVALID)
 			continue;

@@ -3717,18 +3715,18 @@ void Server::sendRemoveNode(v3s16 p, u16 ignore_id,
 }

 void Server::sendAddNode(v3s16 p, MapNode n, u16 ignore_id,
-		core::list<u16> *far_players, float far_d_nodes)
+		std::list<u16> *far_players, float far_d_nodes)
 {
 	float maxd = far_d_nodes*BS;
 	v3f p_f = intToFloat(p, BS);

-	for(core::map<u16, RemoteClient*>::Iterator
-		i = m_clients.getIterator();
-		i.atEnd() == false; i++)
+	for(std::map<u16, RemoteClient*>::iterator
+		i = m_clients.begin();
+		i != m_clients.end(); ++i)
 	{
 		// Get client and check that it is valid
-		RemoteClient *client = i.getNode()->getValue();
-		assert(client->peer_id == i.getNode()->getKey());
+		RemoteClient *client = i->second;
+		assert(client->peer_id == i->first);
 		if(client->serialization_version == SER_FMT_VER_INVALID)
 			continue;

@@ -3768,11 +3766,11 @@ void Server::sendAddNode(v3s16 p, MapNode n, u16 ignore_id,

 void Server::setBlockNotSent(v3s16 p)
 {
-	for(core::map<u16, RemoteClient*>::Iterator
-		i = m_clients.getIterator();
-		i.atEnd()==false; i++)
+	for(std::map<u16, RemoteClient*>::iterator
+		i = m_clients.begin();
+		i != m_clients.end(); ++i)
 	{
-		RemoteClient *client = i.getNode()->getValue();
+		RemoteClient *client = i->second;
 		client->SetBlockNotSent(p);
 	}
 }
@@ -3839,19 +3837,19 @@ void Server::SendBlocks(float dtime)

 	ScopeProfiler sp(g_profiler, "Server: sel and send blocks to clients");

-	core::array<PrioritySortedBlockTransfer> queue;
+	std::vector<PrioritySortedBlockTransfer> queue;

 	s32 total_sending = 0;

 	{
 		ScopeProfiler sp(g_profiler, "Server: selecting blocks for sending");

-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++)
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i)
 		{
-			RemoteClient *client = i.getNode()->getValue();
-			assert(client->peer_id == i.getNode()->getKey());
+			RemoteClient *client = i->second;
+			assert(client->peer_id == i->first);

 			// If definitions and textures have not been sent, don't
 			// send MapBlocks either
@@ -3870,7 +3868,7 @@ void Server::SendBlocks(float dtime)
 	// Sort.
 	// Lowest priority number comes first.
 	// Lowest is most important.
-	queue.sort();
+	std::sort(queue.begin(), queue.end());

 	for(u32 i=0; i<queue.size(); i++)
 	{
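core::array had a member sort(), but std::vector does not, so the commit switches to the free algorithm std::sort, which requires the element type to be comparable with operator< (or an explicit comparator). A sketch under that assumption; the real PrioritySortedBlockTransfer is defined elsewhere in the tree, and the 'priority' field here is an illustration, not its actual layout:

#include <algorithm>
#include <vector>

struct BlockTransfer
{
	float priority;  // lower value = more important
	bool operator<(const BlockTransfer &other) const
	{
		return priority < other.priority;
	}
};

void sort_queue(std::vector<BlockTransfer> &queue)
{
	// core::array::sort() was a member function; std::vector has none,
	// so the free algorithm operates on the iterator range instead.
	std::sort(queue.begin(), queue.end());
}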
@@ -4017,7 +4015,7 @@ void Server::sendMediaAnnouncement(u16 peer_id)
 	verbosestream<<"Server: Announcing files to id("<<peer_id<<")"
 			<<std::endl;

-	core::list<SendableMediaAnnouncement> file_announcements;
+	std::list<SendableMediaAnnouncement> file_announcements;

 	for(std::map<std::string, MediaInfo>::iterator i = m_media.begin();
 			i != m_media.end(); i++){
@@ -4043,9 +4041,9 @@ void Server::sendMediaAnnouncement(u16 peer_id)
 	writeU16(os, TOCLIENT_ANNOUNCE_MEDIA);
 	writeU16(os, file_announcements.size());

-	for(core::list<SendableMediaAnnouncement>::Iterator
+	for(std::list<SendableMediaAnnouncement>::iterator
 			j = file_announcements.begin();
-			j != file_announcements.end(); j++){
+			j != file_announcements.end(); ++j){
 		os<<serializeString(j->name);
 		os<<serializeString(j->sha1_digest);
 	}
@@ -4074,7 +4072,7 @@ struct SendableMedia
 };

 void Server::sendRequestedMedia(u16 peer_id,
-		const core::list<MediaRequest> &tosend)
+		const std::list<MediaRequest> &tosend)
 {
 	DSTACK(__FUNCTION_NAME);

@@ -4086,13 +4084,13 @@ void Server::sendRequestedMedia(u16 peer_id,
 	// Put 5kB in one bunch (this is not accurate)
 	u32 bytes_per_bunch = 5000;

-	core::array< core::list<SendableMedia> > file_bunches;
-	file_bunches.push_back(core::list<SendableMedia>());
+	std::vector< std::list<SendableMedia> > file_bunches;
+	file_bunches.push_back(std::list<SendableMedia>());

 	u32 file_size_bunch_total = 0;

-	for(core::list<MediaRequest>::ConstIterator i = tosend.begin();
-			i != tosend.end(); i++)
+	for(std::list<MediaRequest>::const_iterator i = tosend.begin();
+			i != tosend.end(); ++i)
 	{
 		if(m_media.find(i->name) == m_media.end()){
 			errorstream<<"Server::sendRequestedMedia(): Client asked for "
@@ -4138,7 +4136,7 @@ void Server::sendRequestedMedia(u16 peer_id,

 		// Start next bunch if got enough data
 		if(file_size_bunch_total >= bytes_per_bunch){
-			file_bunches.push_back(core::list<SendableMedia>());
+			file_bunches.push_back(std::list<SendableMedia>());
 			file_size_bunch_total = 0;
 		}

@@ -4169,9 +4167,9 @@ void Server::sendRequestedMedia(u16 peer_id,
 		writeU16(os, i);
 		writeU32(os, file_bunches[i].size());

-		for(core::list<SendableMedia>::Iterator
+		for(std::list<SendableMedia>::iterator
 				j = file_bunches[i].begin();
-				j != file_bunches[i].end(); j++){
+				j != file_bunches[i].end(); ++j){
 			os<<serializeString(j->name);
 			os<<serializeLongString(j->data);
 		}
@@ -4212,10 +4210,10 @@ void Server::sendDetachedInventoryToAll(const std::string &name)
 {
 	DSTACK(__FUNCTION_NAME);

-	for(core::map<u16, RemoteClient*>::Iterator
-		i = m_clients.getIterator();
-		i.atEnd() == false; i++){
-		RemoteClient *client = i.getNode()->getValue();
+	for(std::map<u16, RemoteClient*>::iterator
+		i = m_clients.begin();
+		i != m_clients.end(); ++i){
+		RemoteClient *client = i->second;
 		sendDetachedInventory(name, client->peer_id);
 	}
 }
@@ -4299,11 +4297,11 @@ RemoteClient* Server::getClient(u16 peer_id)
 {
 	DSTACK(__FUNCTION_NAME);
 	//JMutexAutoLock lock(m_con_mutex);
-	core::map<u16, RemoteClient*>::Node *n;
+	std::map<u16, RemoteClient*>::iterator n;
 	n = m_clients.find(peer_id);
 	// A client should exist for all peers
-	assert(n != NULL);
-	return n->getValue();
+	assert(n != m_clients.end());
+	return n->second;
 }

 std::wstring Server::getStatusString()
@@ -4315,15 +4313,15 @@ std::wstring Server::getStatusString()
 	// Uptime
 	os<<L", uptime="<<m_uptime.get();
 	// Information about clients
-	core::map<u16, RemoteClient*>::Iterator i;
+	std::map<u16, RemoteClient*>::iterator i;
 	bool first;
 	os<<L", clients={";
-	for(i = m_clients.getIterator(), first = true;
-			i.atEnd() == false; i++)
+	for(i = m_clients.begin(), first = true;
+			i != m_clients.end(); ++i)
 	{
 		// Get client and check that it is valid
-		RemoteClient *client = i.getNode()->getValue();
-		assert(client->peer_id == i.getNode()->getKey());
+		RemoteClient *client = i->second;
+		assert(client->peer_id == i->first);
 		if(client->serialization_version == SER_FMT_VER_INVALID)
 			continue;
 		// Get player
@@ -4363,10 +4361,10 @@ bool Server::checkPriv(const std::string &name, const std::string &priv)
 void Server::reportPrivsModified(const std::string &name)
 {
 	if(name == ""){
-		for(core::map<u16, RemoteClient*>::Iterator
-			i = m_clients.getIterator();
-			i.atEnd() == false; i++){
-			RemoteClient *client = i.getNode()->getValue();
+		for(std::map<u16, RemoteClient*>::iterator
+			i = m_clients.begin();
+			i != m_clients.end(); ++i){
+			RemoteClient *client = i->second;
 			Player *player = m_env->getPlayer(client->peer_id);
 			reportPrivsModified(player->getName());
 		}
@@ -4579,11 +4577,11 @@ const ModSpec* Server::getModSpec(const std::string &modname)
 	}
 	return NULL;
 }
-void Server::getModNames(core::list<std::string> &modlist)
+void Server::getModNames(std::list<std::string> &modlist)
 {
 	for(std::vector<ModSpec>::iterator i = m_mods.begin(); i != m_mods.end(); i++)
 	{
-		modlist.push_back((*i).name);
+		modlist.push_back(i->name);
 	}
 }
 std::string Server::getBuiltinLuaPath()
@@ -4733,15 +4731,15 @@ void Server::handlePeerChange(PeerChange &c)
 		*/

 		// Error check
-		core::map<u16, RemoteClient*>::Node *n;
+		std::map<u16, RemoteClient*>::iterator n;
 		n = m_clients.find(c.peer_id);
 		// The client shouldn't already exist
-		assert(n == NULL);
+		assert(n == m_clients.end());

 		// Create client
 		RemoteClient *client = new RemoteClient();
 		client->peer_id = c.peer_id;
-		m_clients.insert(client->peer_id, client);
+		m_clients[client->peer_id] = client;

 	} // PEER_ADDED
 	else if(c.type == PEER_REMOVED)
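The peer-add path shows the replacement for core::map's two-argument insert(): assignment through operator[], which default-constructs and inserts the mapped value when the key is missing. The existence check likewise moves from a NULL node pointer to an end() iterator. A stub sketch (RemoteClientStub is not the real class):

#include <cassert>
#include <map>

struct RemoteClientStub { unsigned short peer_id; };

void add_client(std::map<unsigned short, RemoteClientStub*> &clients,
		unsigned short peer_id)
{
	// The client shouldn't already exist.
	assert(clients.find(peer_id) == clients.end());  // was assert(n == NULL)
	RemoteClientStub *client = new RemoteClientStub();
	client->peer_id = peer_id;
	clients[peer_id] = client;  // was m_clients.insert(peer_id, client)
}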
@@ -4751,22 +4749,22 @@ void Server::handlePeerChange(PeerChange &c)
 		*/

 		// Error check
-		core::map<u16, RemoteClient*>::Node *n;
+		std::map<u16, RemoteClient*>::iterator n;
 		n = m_clients.find(c.peer_id);
 		// The client should exist
-		assert(n != NULL);
+		assert(n != m_clients.end());

 		/*
 			Mark objects to be not known by the client
 		*/
-		RemoteClient *client = n->getValue();
+		RemoteClient *client = n->second;
 		// Handle objects
-		for(core::map<u16, bool>::Iterator
-			i = client->m_known_objects.getIterator();
-			i.atEnd()==false; i++)
+		for(std::set<u16>::iterator
+			i = client->m_known_objects.begin();
+			i != client->m_known_objects.end(); ++i)
 		{
 			// Get object
-			u16 id = i.getNode()->getKey();
+			u16 id = *i;
 			ServerActiveObject* obj = m_env->getActiveObject(id);

 			if(obj && obj->m_known_by_count > 0)
@@ -4824,12 +4822,12 @@ void Server::handlePeerChange(PeerChange &c)
 		if(player != NULL)
 		{
 			std::ostringstream os(std::ios_base::binary);
-			for(core::map<u16, RemoteClient*>::Iterator
-				i = m_clients.getIterator();
-				i.atEnd() == false; i++)
+			for(std::map<u16, RemoteClient*>::iterator
+				i = m_clients.begin();
+				i != m_clients.end(); ++i)
 			{
-				RemoteClient *client = i.getNode()->getValue();
-				assert(client->peer_id == i.getNode()->getKey());
+				RemoteClient *client = i->second;
+				assert(client->peer_id == i->first);
 				if(client->serialization_version == SER_FMT_VER_INVALID)
 					continue;
 				// Get player
@@ -4849,7 +4847,7 @@ void Server::handlePeerChange(PeerChange &c)

 		// Delete client
 		delete m_clients[c.peer_id];
-		m_clients.remove(c.peer_id);
+		m_clients.erase(c.peer_id);

 		// Send player info to all remaining clients
 		//SendPlayerInfos();