#include "quakedef.h"
#include "fs.h"
#include "libcurl.h"
+#include "thread.h"
static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
static dllhandle_t curl_dll = NULL;
// will be checked at many places to find out if qcurl calls are allowed
+void *curl_mutex = NULL;
+
typedef struct downloadinfo_s
{
char filename[MAX_OSPATH];
Setting the command to NULL clears it.
====================
*/
-void Curl_CommandWhenDone(const char *cmd)
+static void Curl_CommandWhenDone(const char *cmd)
{
if(!curl_dll)
return;
Problem: what counts as an error?
*/
-void Curl_CommandWhenError(const char *cmd)
+static void Curl_CommandWhenError(const char *cmd)
{
if(!curl_dll)
return;
downloadinfo *di;
if(noclear)
return;
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
for(di = downloads; di; di = di->next)
di->forthismap = false;
Curl_CommandWhenError(NULL);
numdownloads_fail = 0;
numdownloads_success = 0;
numdownloads_added = 0;
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
}
/*
void Curl_Register_predownload(void)
{
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
Curl_CommandWhenDone("cl_begindownloads");
Curl_CommandWhenError("cl_begindownloads");
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
}
/*
{
if(!curl_dll)
return;
- if(numdownloads_added && (numdownloads_success == numdownloads_added) && *command_when_done)
- {
- Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
- Cbuf_AddText("\n");
- Cbuf_AddText(command_when_done);
- Cbuf_AddText("\n");
- Curl_Clear_forthismap();
- }
- else if(numdownloads_added && numdownloads_fail && *command_when_error)
+ if(numdownloads_added && ((numdownloads_success + numdownloads_fail) == numdownloads_added))
{
- Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
- Cbuf_AddText("\n");
- Cbuf_AddText(command_when_error);
- Cbuf_AddText("\n");
+ if(numdownloads_fail == 0)
+ {
+ Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
+ Cbuf_AddText("\n");
+ Cbuf_AddText(command_when_done);
+ Cbuf_AddText("\n");
+ }
+ else
+ {
+ Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
+ Cbuf_AddText("\n");
+ Cbuf_AddText(command_when_error);
+ Cbuf_AddText("\n");
+ }
Curl_Clear_forthismap();
}
}
}
}
-static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
-{
- curl_default_callback(status, length_received, buffer, cbdata);
-}
-
/*
====================
Curl_EndDownload
Returns a "cleaned up" URL for display (to strip login data)
====================
*/
-static const char *CleanURL(const char *url)
+static const char *CleanURL(const char *url, char *urlbuf, size_t urlbuflength)
{
- static char urlbuf[1024];
const char *p, *q, *r;
// if URL is of form anything://foo-without-slash@rest, replace by anything://rest
r = strchr(p + 3, '/');
if(!r || q < r)
{
- dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s%s", (int)(p - url + 3), url, q + 1);
+ dpsnprintf(urlbuf, urlbuflength, "%.*s%s", (int)(p - url + 3), url, q + 1);
return urlbuf;
}
}
static void CheckPendingDownloads(void)
{
const char *h;
+ char urlbuf[1024];
+ char vabuf[1024];
if(!curl_dll)
return;
if(numdownloads < cl_curl_maxdownloads.integer)
{
if(!di->buffer)
{
- Con_Printf("Downloading %s -> %s", CleanURL(di->url), di->filename);
+ Con_Printf("Downloading %s -> %s", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
di->stream = FS_OpenRealFile(di->filename, "ab", false);
if(!di->stream)
}
else
{
- Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url));
+ Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url, urlbuf, sizeof(urlbuf)));
di->startpos = 0;
}
qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
- di->slist = qcurl_slist_append(di->slist, va("Content-Type: %s", di->post_content_type));
+ di->slist = qcurl_slist_append(di->slist, va(vabuf, sizeof(vabuf), "Content-Type: %s", di->post_content_type));
}
// parse extra headers into slist
}
qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
-
qcurl_multi_add_handle(curlm, di->curle);
di->started = true;
CURL_OpenLibrary();
if(!curl_dll)
return;
+ if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex();
qcurl_global_init(CURL_GLOBAL_NOTHING);
curlm = qcurl_multi_init();
}
return;
Curl_ClearRequirements();
Curl_CancelAll();
+ if (curl_mutex) Thread_DestroyMutex(curl_mutex);
CURL_CloseLibrary();
curl_dll = NULL;
}
return NULL;
}
-void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata)
-{
- downloadinfo *di;
- if(!curl_dll)
- return;
- for(di = downloads; di; )
- {
- if(di->callback == callback && di->callback_data == cbdata)
- {
- di->callback = curl_quiet_callback; // do NOT call the callback
- Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
- di = downloads;
- }
- else
- di = di->next;
- }
-}
-
/*
====================
Curl_Begin
// 141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
if(!name)
- name = CleanURL(URL);
+ name = CleanURL(URL, urlbuf, sizeof(urlbuf));
+
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
if(!buf)
{
downloadinfo *di = Curl_Find(fn);
if(di)
{
- Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url));
+ Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url, urlbuf, sizeof(urlbuf)));
// however, if it was not for this map yet...
if(forthismap && !di->forthismap)
if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
{
Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL);
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
return false;
}
}
downloads = di;
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
return true;
}
}
if(!curl_dll)
return;
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
+
Curl_CheckCommandWhenDone();
if(!downloads)
+ {
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
return;
+ }
if(realtime < curltime) // throttle
+ {
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
return;
+ }
{
int remaining;
for(di = downloads; di; di = di->next)
{
double b = 0;
- qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
- bytes_sent += (b - di->bytes_sent_curl);
- di->bytes_sent_curl = b;
- qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
- bytes_sent += (b - di->bytes_received_curl);
- di->bytes_received_curl = b;
+ if(di->curle)
+ {
+ qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+ bytes_sent += (b - di->bytes_sent_curl);
+ di->bytes_sent_curl = b;
+ qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+ bytes_sent += (b - di->bytes_received_curl);
+ di->bytes_received_curl = b;
+ }
}
for(;;)
}
else
curltime = realtime;
+
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
}
/*
if(!curl_dll)
return;
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
+
while(downloads)
{
Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK);
// INVARIANT: downloads will point to the next download after that!
}
+
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
}
/*
static void Curl_Info_f(void)
{
downloadinfo *di;
+ char urlbuf[1024];
if(!curl_dll)
return;
if(Curl_Running())
{
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
Con_Print("Currently running downloads:\n");
for(di = downloads; di; di = di->next)
{
double speed, percent;
- Con_Printf(" %s -> %s ", CleanURL(di->url), di->filename);
+ Con_Printf(" %s -> %s ", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
percent = 100.0 * Curl_GetDownloadAmount(di);
speed = Curl_GetDownloadSpeed(di);
if(percent >= 0)
else
Con_Print("(queued)\n");
}
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
}
else
{
once the last download completes successfully, reconnect to the current server
====================
*/
-void Curl_Curl_f(void)
+static void Curl_Curl_f(void)
{
double maxspeed = 0;
int i;
array must be freed later using Z_Free.
====================
*/
-Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info)
+Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info, char *addinfo, size_t addinfolength)
{
int i;
downloadinfo *di;
Curl_downloadinfo_t *downinfo;
- static char addinfo[128];
if(!curl_dll)
{
return NULL;
}
+ if (curl_mutex) Thread_LockMutex(curl_mutex);
+
i = 0;
for(di = downloads; di; di = di->next)
++i;
if(*command_when_done && !numdownloads_fail && numdownloads_added)
{
if(!strncmp(command_when_done, "connect ", 8))
- dpsnprintf(addinfo, sizeof(addinfo), "(will join %s when done)", command_when_done + 8);
+ dpsnprintf(addinfo, addinfolength, "(will join %s when done)", command_when_done + 8);
else if(!strcmp(command_when_done, "cl_begindownloads"))
- dpsnprintf(addinfo, sizeof(addinfo), "(will enter the game when done)");
+ dpsnprintf(addinfo, addinfolength, "(will enter the game when done)");
else
- dpsnprintf(addinfo, sizeof(addinfo), "(will do '%s' when done)", command_when_done);
+ dpsnprintf(addinfo, addinfolength, "(will do '%s' when done)", command_when_done);
*additional_info = addinfo;
}
else
}
*nDownloads = i;
+ if (curl_mutex) Thread_UnlockMutex(curl_mutex);
return downinfo;
}
*/
static const char *Curl_FindPackURL(const char *filename)
{
- static char foundurl[1024];
+ static char foundurl[1024]; // invoked only by server
fs_offset_t filesize;
char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
if(buf && filesize)
*/
void Curl_ClearRequirements(void)
{
- const char *p;
while(requirements)
{
requirement *req = requirements;
requirements = requirements->next;
Z_Free(req);
}
- p = sv_curl_serverpackages.string;
- Con_DPrintf("Require all of: %s\n", p);
- while(COM_ParseToken_Simple(&p, false, false))
- {
- Con_DPrintf("Require: %s\n", com_token);
- Curl_RequireFile(com_token);
- }
}
/*
curl --finish_autodownload
====================
*/
+// Appends the client console commands needed to auto-download the pack that
+// provides `filename` to `sendbuffer` (bounded by `sendbuffer_len`).
+// `foundone` tells whether an earlier call already emitted commands into the
+// buffer (so "curl --clear_autodownload" is only prepended once).
+// Returns true if commands were appended for this file, false if the file is
+// not inside a pack or its pack has no download URL (or the URL is "-").
+// NOTE(review): strlcat silently truncates on overflow of sendbuffer — the
+// caller presumably sizes the buffer generously; confirm against caller.
+static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
+{
+	const char *p;
+	const char *thispack = FS_WhichPack(filename);
+	const char *packurl;
+
+	if(!thispack)
+		return false;
+
+	// strip the directory part: keep only the pack's base name
+	p = strrchr(thispack, '/');
+	if(p)
+		thispack = p + 1;
+
+	packurl = Curl_FindPackURL(thispack);
+
+	// "-" is the sentinel for "never auto-download this pack"
+	if(packurl && *packurl && strcmp(packurl, "-"))
+	{
+		if(!foundone)
+			strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+
+		strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+		strlcat(sendbuffer, thispack, sendbuffer_len);
+		if(sv_curl_maxspeed.value > 0)
+			dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
+		strlcat(sendbuffer, " --for ", sendbuffer_len);
+		strlcat(sendbuffer, filename, sendbuffer_len);
+		strlcat(sendbuffer, " ", sendbuffer_len);
+		// final URL is the pack's base URL with the pack name appended
+		strlcat(sendbuffer, packurl, sendbuffer_len);
+		strlcat(sendbuffer, thispack, sendbuffer_len);
+		strlcat(sendbuffer, "\n", sendbuffer_len);
+
+		return true;
+	}
+
+	return false;
+}
void Curl_SendRequirements(void)
{
// for each requirement, find the pack name
char sendbuffer[4096] = "";
requirement *req;
qboolean foundone = false;
+ const char *p;
for(req = requirements; req; req = req->next)
- {
- const char *p;
- const char *thispack = FS_WhichPack(req->filename);
- const char *packurl;
+ foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
- if(!thispack)
- continue;
-
- p = strrchr(thispack, '/');
- if(p)
- thispack = p + 1;
-
- packurl = Curl_FindPackURL(thispack);
-
- if(packurl && *packurl && strcmp(packurl, "-"))
- {
- if(!foundone)
- strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
-
- strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
- strlcat(sendbuffer, thispack, sizeof(sendbuffer));
- if(sv_curl_maxspeed.value > 0)
- dpsnprintf(sendbuffer + strlen(sendbuffer), sizeof(sendbuffer) - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
- strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
- strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
- strlcat(sendbuffer, " ", sizeof(sendbuffer));
- strlcat(sendbuffer, packurl, sizeof(sendbuffer));
- strlcat(sendbuffer, thispack, sizeof(sendbuffer));
- strlcat(sendbuffer, "\n", sizeof(sendbuffer));
-
- foundone = true;
- }
- }
+ p = sv_curl_serverpackages.string;
+ while(COM_ParseToken_Simple(&p, false, false, true))
+ foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
if(foundone)
strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));