
Make remote media exclusively use GET for hash set (#15885)

Author: sfan5, 2025-03-19 22:06:34 +01:00 (committed by GitHub)
parent a9a3b05cc3
commit 1f14b7cb1b
Signature: GPG key ID B5690EEEBB952194 (no known key found for this signature in database)
2 changed files with 21 additions and 36 deletions

src/client/clientmedia.cpp

@@ -154,7 +154,7 @@ void ClientMediaDownloader::step(Client *client)
 		startRemoteMediaTransfers();
 		// Did all remote transfers end and no new ones can be started?
-		// If so, request still missing files from the minetest server
+		// If so, request still missing files from the server
 		// (Or report that we have all files.)
 		if (m_httpfetch_active == 0) {
 			if (m_uncached_received_count < m_uncached_count) {
@@ -168,6 +168,15 @@ void ClientMediaDownloader::step(Client *client)
 	}
 }
+std::string ClientMediaDownloader::makeReferer(Client *client)
+{
+	std::string addr = client->getAddressName();
+	if (addr.find(':') != std::string::npos)
+		addr = '[' + addr + ']';
+	return std::string("minetest://") + addr + ':' +
+		std::to_string(client->getServerAddress().getPort());
+}
 void ClientMediaDownloader::initialStep(Client *client)
 {
 	std::wstring loading_text = wstrgettext("Media...");
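For readers skimming the diff, a self-contained sketch of the URL shape the new makeReferer() helper produces, using hypothetical address and port values (the real helper reads both from the Client object):

// Standalone sketch only; mirrors the makeReferer() logic added above with
// hypothetical example inputs -- not part of the commit.
#include <iostream>
#include <string>

static std::string makeRefererSketch(std::string addr, unsigned short port)
{
	// A plain IPv6 address contains ':' and must be bracketed so the
	// trailing ":port" stays unambiguous.
	if (addr.find(':') != std::string::npos)
		addr = '[' + addr + ']';
	return std::string("minetest://") + addr + ':' + std::to_string(port);
}

int main()
{
	std::cout << makeRefererSketch("203.0.113.7", 30000) << '\n'; // minetest://203.0.113.7:30000
	std::cout << makeRefererSketch("2001:db8::1", 30000) << '\n'; // minetest://[2001:db8::1]:30000
}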
@@ -227,9 +236,14 @@ void ClientMediaDownloader::initialStep(Client *client)
 	m_httpfetch_active_limit = g_settings->getS32("curl_parallel_limit");
 	m_httpfetch_active_limit = MYMAX(m_httpfetch_active_limit, 84);
-	// Write a list of hashes that we need. This will be POSTed
-	// to the server using Content-Type: application/octet-stream
-	std::string required_hash_set = serializeRequiredHashSet();
+	// Note: we used to use a POST request that contained the set of
+	// hashes we needed here, but this use was discontinued in 5.12.0 as
+	// it's not CDN/static hosting-friendly.
+	// Even with a large repository of media (think 60k files), you would be
+	// looking at only 1.1 MB for the index file.
+	// If it becomes a problem we can always still introduce a v2 of the
+	// hash set format and truncate the hashes to 6 bytes -- at the cost of
+	// a false-positive rate of 2^-48 -- which is 70% less space.
 	// minor fixme: this loop ignores m_httpfetch_active_limit
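A back-of-the-envelope check of the figures in the new comment, assuming the hash-set layout shown in the removed serializeRequiredHashSet() further down (a 6-byte header plus one 20-byte SHA-1 per file); illustrative sketch only:

// Rough size estimate matching the comment above -- not part of the commit.
#include <cstdio>

int main()
{
	const long files = 60000;            // "think 60k files"
	const long header = 4 + 2;           // u32 signature + u16 version
	const long v1 = header + files * 20; // one full 20-byte SHA-1 per file
	const long v2 = header + files * 6;  // hypothetical v2 with 6-byte truncated hashes
	std::printf("v1 index: %ld bytes (~%.1f MiB)\n", v1, v1 / (1024.0 * 1024.0)); // ~1.1
	std::printf("v2 index: %ld bytes, %.0f%% smaller\n", v2, 100.0 * (v1 - v2) / v1); // 70
	// A 6-byte hash has 48 bits, so a spurious match occurs with probability about 2^-48.
	return 0;
}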
@@ -251,19 +265,8 @@ void ClientMediaDownloader::initialStep(Client *client)
 				remote->baseurl + MTHASHSET_FILE_NAME;
 		fetch_request.caller = m_httpfetch_caller;
 		fetch_request.request_id = m_httpfetch_next_id; // == i
-		fetch_request.method = HTTP_POST;
-		fetch_request.raw_data = required_hash_set;
-		fetch_request.extra_headers.emplace_back(
-				"Content-Type: application/octet-stream");
-		// Encapsulate possible IPv6 plain address in []
-		std::string addr = client->getAddressName();
-		if (addr.find(':', 0) != std::string::npos)
-			addr = '[' + addr + ']';
 		fetch_request.extra_headers.emplace_back(
-				std::string("Referer: minetest://") +
-				addr + ":" +
-				std::to_string(client->getServerAddress().getPort()));
+				"Referer: " + makeReferer(client));
 		httpfetch_async(fetch_request);
@@ -585,25 +588,6 @@ bool IClientMediaDownloader::checkAndLoad(
 	1 - Initial version
 */
-std::string ClientMediaDownloader::serializeRequiredHashSet()
-{
-	std::ostringstream os(std::ios::binary);
-	writeU32(os, MTHASHSET_FILE_SIGNATURE); // signature
-	writeU16(os, 1); // version
-	// Write list of hashes of files that have not been
-	// received (found in cache) yet
-	for (const auto &it : m_files) {
-		if (!it.second->received) {
-			FATAL_ERROR_IF(it.second->sha1.size() != 20, "Invalid SHA1 size");
-			os << it.second->sha1;
-		}
-	}
-	return os.str();
-}
 void ClientMediaDownloader::deSerializeHashSet(const std::string &data,
 		std::set<std::string> &result)
 {
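Taken together, the removed serializer documents the layout of the hash-set blob the client now fetches with a plain GET: a u32 signature, a u16 version, then consecutive raw 20-byte SHA-1 digests (assuming the usual big-endian serialization helpers). A minimal reader under those assumptions; the actual deSerializeHashSet() may differ, and the signature constant below is assumed rather than copied from the source:

// Sketch only: parses a v1 hash-set blob of the shape produced by the removed
// serializeRequiredHashSet() above -- not the real deSerializeHashSet().
#include <cstdint>
#include <set>
#include <stdexcept>
#include <string>

static std::set<std::string> parseHashSetSketch(const std::string &data)
{
	auto byteAt = [&](size_t i) { return uint32_t(uint8_t(data[i])); };
	if (data.size() < 6 || (data.size() - 6) % 20 != 0)
		throw std::runtime_error("malformed hash set");
	// Header: big-endian u32 signature, then big-endian u16 version.
	const uint32_t signature = (byteAt(0) << 24) | (byteAt(1) << 16) | (byteAt(2) << 8) | byteAt(3);
	const uint16_t version = uint16_t((byteAt(4) << 8) | byteAt(5));
	const uint32_t EXPECTED_SIGNATURE = 0x4d544853; // assumed value of MTHASHSET_FILE_SIGNATURE
	if (signature != EXPECTED_SIGNATURE || version != 1)
		throw std::runtime_error("bad signature or unsupported version");
	// Body: consecutive raw 20-byte SHA-1 digests, one per media file.
	std::set<std::string> result;
	for (size_t pos = 6; pos < data.size(); pos += 20)
		result.insert(data.substr(pos, 20));
	return result;
}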

View file

@@ -119,6 +119,8 @@ protected:
 	bool loadMedia(Client *client, const std::string &data,
 			const std::string &name) override;
+	static std::string makeReferer(Client *client);
 private:
 	struct FileStatus {
 		bool received;
@@ -142,7 +144,6 @@ private:
 	static void deSerializeHashSet(const std::string &data,
 			std::set<std::string> &result);
-	std::string serializeRequiredHashSet();
 	// Maps filename to file status
 	std::map<std::string, FileStatus*> m_files;