#include "libcurl.h"
static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
+static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
/*
typedef struct CURL_s CURL;
typedef struct CURLM_s CURLM;
+typedef struct curl_slist curl_slist;
typedef enum
{
CURLE_OK = 0
CINIT(URL, OBJECTPOINT, 2),
CINIT(ERRORBUFFER, OBJECTPOINT, 10),
CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
+ CINIT(POSTFIELDS, OBJECTPOINT, 15),
CINIT(REFERER, OBJECTPOINT, 16),
CINIT(USERAGENT, OBJECTPOINT, 18),
+ CINIT(LOW_SPEED_LIMIT, LONG , 19),
+ CINIT(LOW_SPEED_TIME, LONG, 20),
CINIT(RESUME_FROM, LONG, 21),
+ CINIT(HTTPHEADER, OBJECTPOINT, 23),
+ CINIT(POST, LONG, 47), /* HTTP POST method */
CINIT(FOLLOWLOCATION, LONG, 52), /* use Location: Luke! */
+ CINIT(POSTFIELDSIZE, LONG, 60),
CINIT(PRIVATE, OBJECTPOINT, 103),
- CINIT(LOW_SPEED_LIMIT, LONG , 19),
- CINIT(LOW_SPEED_TIME, LONG, 20),
CINIT(PROTOCOLS, LONG, 181),
- CINIT(REDIR_PROTOCOLS, LONG, 182),
+ CINIT(REDIR_PROTOCOLS, LONG, 182)
}
CURLoption;
#define CURLPROTO_HTTP (1<<0)
CURLINFO_PROXYAUTH_AVAIL = CURLINFO_LONG + 24,
CURLINFO_OS_ERRNO = CURLINFO_LONG + 25,
CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26,
- CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27,
+ CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27
}
CURLINFO;
static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
static void (*qcurl_multi_cleanup) (CURLM *);
static const char * (*qcurl_multi_strerror) (CURLcode);
+static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
+static void (*qcurl_slist_free_all) (curl_slist *list);
static dllfunction_t curlfuncs[] =
{
{"curl_multi_info_read", (void **) &qcurl_multi_info_read},
{"curl_multi_cleanup", (void **) &qcurl_multi_cleanup},
{"curl_multi_strerror", (void **) &qcurl_multi_strerror},
+ {"curl_slist_append", (void **) &qcurl_slist_append},
+ {"curl_slist_free_all", (void **) &qcurl_slist_free_all},
{NULL, NULL}
};
CURL *curle;
qboolean started;
qboolean ispak;
- unsigned long bytes_received;
+ unsigned long bytes_received; // for buffer
+ double bytes_received_curl; // for throttling
+ double bytes_sent_curl; // for throttling
struct downloadinfo_s *next, *prev;
qboolean forthismap;
+ double maxspeed;
+ curl_slist *slist; // http headers
unsigned char *buffer;
size_t buffersize;
curl_callback_t callback;
void *callback_data;
+
+ const unsigned char *postbuf;
+ size_t postbufsize;
+ const char *post_content_type;
+ const char *extraheaders;
}
downloadinfo;
static downloadinfo *downloads = NULL;
{
const char* dllnames [] =
{
-#if defined(WIN64)
- "libcurl64.dll",
-#elif defined(WIN32)
+#if defined(WIN32)
"libcurl-4.dll",
"libcurl-3.dll",
#elif defined(MACOSX)
static CURLM *curlm = NULL;
-static unsigned long bytes_received = 0; // used for bandwidth throttling
+static double bytes_received = 0; // used for bandwidth throttling
+static double bytes_sent = 0; // used for bandwidth throttling
static double curltime = 0;
/*
ret = FS_Write(di->stream, data, bytes);
}
- bytes_received += bytes;
di->bytes_received += bytes;
return ret; // why not ret / nmemb?
switch(status)
{
case CURLCBSTATUS_OK:
- Con_Printf("Download of %s: OK\n", di->filename);
+ Con_DPrintf("Download of %s: OK\n", di->filename);
break;
case CURLCBSTATUS_FAILED:
- Con_Printf("Download of %s: FAILED\n", di->filename);
+ Con_DPrintf("Download of %s: FAILED\n", di->filename);
break;
case CURLCBSTATUS_ABORTED:
- Con_Printf("Download of %s: ABORTED\n", di->filename);
+ Con_DPrintf("Download of %s: ABORTED\n", di->filename);
break;
case CURLCBSTATUS_SERVERERROR:
- Con_Printf("Download of %s: (unknown server error)\n", di->filename);
+ Con_DPrintf("Download of %s: (unknown server error)\n", di->filename);
break;
case CURLCBSTATUS_UNKNOWN:
- Con_Printf("Download of %s: (unknown client error)\n", di->filename);
+ Con_DPrintf("Download of %s: (unknown client error)\n", di->filename);
break;
default:
- Con_Printf("Download of %s: %d\n", di->filename, status);
+ Con_DPrintf("Download of %s: %d\n", di->filename, status);
break;
}
}
// "quiet" variant of the default callback: the default callback now reports
// via Con_DPrintf (developer-mode only), so this can simply delegate to it
// unconditionally
static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
{
	curl_default_callback(status, length_received, buffer, cbdata);
}
/*
code from libcurl, or 0, if another error has occurred.
====================
*/
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
{
qboolean ok = false;
{
qcurl_multi_remove_handle(curlm, di->curle);
qcurl_easy_cleanup(di->curle);
+ if(di->slist)
+ qcurl_slist_free_all(di->slist);
}
if(!di->callback && ok && !di->bytes_received)
{
// this was a resume?
// then try to redownload it without reporting the error
- Curl_Begin(di->url, di->filename, di->ispak, di->forthismap, NULL, 0, NULL, NULL);
+ Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL);
di->forthismap = false; // don't count the error
}
}
Z_Free(di);
}
/*
====================
CleanURL

Returns a "cleaned up" URL for display (to strip login data)
====================
*/
static const char *CleanURL(const char *url)
{
	// NOTE: returns a pointer into a static buffer when stripping occurs,
	// so the result is only valid until the next call (display use only)
	static char urlbuf[1024];
	const char *schemesep, *at, *slash;

	// strip a user[:password]@ part that sits between "scheme://" and the
	// first slash, i.e. turn anything://foo-without-slash@rest into
	// anything://rest
	schemesep = strstr(url, "://");
	if(schemesep)
	{
		at = strchr(schemesep + 3, '@');
		if(at)
		{
			slash = strchr(schemesep + 3, '/');
			if(slash == NULL || at < slash)
			{
				// keep "scheme://", then resume right after the '@'
				dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s%s", (int)(schemesep - url + 3), url, at + 1);
				return urlbuf;
			}
		}
	}

	return url;
}
+
/*
====================
CheckPendingDownloads
*/
static void CheckPendingDownloads(void)
{
+ const char *h;
if(!curl_dll)
return;
if(numdownloads < cl_curl_maxdownloads.integer)
{
if(!di->buffer)
{
- Con_Printf("Downloading %s -> %s", di->url, di->filename);
+ Con_Printf("Downloading %s -> %s", CleanURL(di->url), di->filename);
di->stream = FS_OpenRealFile(di->filename, "ab", false);
if(!di->stream)
}
else
{
- Con_DPrintf("Downloading %s -> memory\n", di->url);
+ Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url));
di->startpos = 0;
}
di->curle = qcurl_easy_init();
+ di->slist = NULL;
qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
//qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
}
+ if(di->post_content_type)
+ {
+ qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
+ qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
+ qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
+ di->slist = qcurl_slist_append(di->slist, va("Content-Type: %s", di->post_content_type));
+ }
+
+ // parse extra headers into slist
+ // \n separated list!
+ h = di->extraheaders;
+ while(h)
+ {
+ const char *hh = strchr(h, '\n');
+ if(hh)
+ {
+ char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
+ memcpy(buf, h, hh - h);
+ buf[hh - h] = 0;
+ di->slist = qcurl_slist_append(di->slist, buf);
+ h = hh + 1;
+ }
+ else
+ {
+ di->slist = qcurl_slist_append(di->slist, h);
+ h = NULL;
+ }
+ }
+
+ qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
+
qcurl_multi_add_handle(curlm, di->curle);
di->started = true;
if given) in the "dlcache/" folder.
====================
*/
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
{
if(!curl_dll)
{
else
{
char fn[MAX_OSPATH];
+ char urlbuf[1024];
const char *p, *q;
size_t length;
downloadinfo *di;
+ // if URL is protocol:///* or protocol://:port/*, insert the IP of the current server
+ p = strchr(URL, ':');
+ if(p)
+ {
+ if(!strncmp(p, ":///", 4) || !strncmp(p, "://:", 4))
+ {
+ char addressstring[128];
+ *addressstring = 0;
+ InfoString_GetValue(cls.userinfo, "*ip", addressstring, sizeof(addressstring));
+ q = strchr(addressstring, ':');
+ if(!q)
+ q = addressstring + strlen(addressstring);
+ if(*addressstring)
+ {
+ dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s://%.*s%s", (int) (p - URL), URL, (int) (q - addressstring), addressstring, URL + (p - URL) + 3);
+ URL = urlbuf;
+ }
+ }
+ }
+
// Note: This extraction of the file name portion is NOT entirely correct.
//
// It does the following:
// 141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
if(!name)
- name = URL;
+ name = CleanURL(URL);
if(!buf)
{
downloadinfo *di = Curl_Find(fn);
if(di)
{
- Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+ Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url));
// however, if it was not for this map yet...
if(forthismap && !di->forthismap)
di->curle = NULL;
di->started = false;
di->ispak = (ispak && !buf);
+ di->maxspeed = maxspeed;
di->bytes_received = 0;
+ di->bytes_received_curl = 0;
+ di->bytes_sent_curl = 0;
+ di->extraheaders = extraheaders;
di->next = downloads;
di->prev = NULL;
if(di->next)
di->callback_data = cbdata;
}
+ if(post_content_type)
+ {
+ di->post_content_type = post_content_type;
+ di->postbuf = postbuf;
+ di->postbufsize = postbufsize;
+ }
+ else
+ {
+ di->post_content_type = NULL;
+ di->postbuf = NULL;
+ di->postbufsize = 0;
+ }
+
downloads = di;
return true;
}
}
-qboolean Curl_Begin_ToFile(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap)
{
- return Curl_Begin(URL, name, ispak, forthismap, NULL, 0, NULL, NULL);
+ return Curl_Begin(URL, NULL, maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
}
-qboolean Curl_Begin_ToMemory(const char *URL, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
{
- return Curl_Begin(URL, NULL, false, false, buf, bufsize, callback, cbdata);
+ return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
+}
+qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+{
+ return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
}
/*
*/
void Curl_Run(void)
{
+ double maxspeed;
+ downloadinfo *di;
+
noclear = FALSE;
if(!cl_curl_enabled.integer)
}
while(mc == CURLM_CALL_MULTI_PERFORM);
+ for(di = downloads; di; di = di->next)
+ {
+ double b = 0;
+ qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+ bytes_sent += (b - di->bytes_sent_curl);
+ di->bytes_sent_curl = b;
+ qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+ bytes_sent += (b - di->bytes_received_curl);
+ di->bytes_received_curl = b;
+ }
+
for(;;)
{
CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
break;
if(msg->msg == CURLMSG_DONE)
{
- downloadinfo *di;
CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
CURLcode result;
qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
// when will we curl the next time?
// we will wait a bit to ensure our download rate is kept.
// we now know that realtime >= curltime... so set up a new curltime
- if(cl_curl_maxspeed.value > 0)
+
+ // use the slowest allowing download to derive the maxspeed... this CAN
+ // be done better, but maybe later
+ maxspeed = cl_curl_maxspeed.value;
+ for(di = downloads; di; di = di->next)
+ if(di->maxspeed > 0)
+ if(di->maxspeed < maxspeed || maxspeed <= 0)
+ maxspeed = di->maxspeed;
+
+ if(maxspeed > 0)
{
- unsigned long bytes = bytes_received; // maybe smoothen a bit?
+ double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
- bytes_received -= bytes;
+ bytes_sent = 0;
+ bytes_received = 0;
}
else
curltime = realtime;
for(di = downloads; di; di = di->next)
{
double speed, percent;
- Con_Printf(" %s -> %s ", di->url, di->filename);
+ Con_Printf(" %s -> %s ", CleanURL(di->url), di->filename);
percent = 100.0 * Curl_GetDownloadAmount(di);
speed = Curl_GetDownloadSpeed(di);
if(percent >= 0)
*/
void Curl_Curl_f(void)
{
+ double maxspeed = 0;
int i;
int end;
qboolean pak = false;
return;
}
- for(i = 0; i != Cmd_Argc(); ++i)
- Con_DPrintf("%s ", Cmd_Argv(i));
- Con_DPrint("\n");
-
if(Cmd_Argc() < 2)
{
Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
{
pak = true;
}
- else if(!strcmp(a, "--for"))
+ else if(!strcmp(a, "--for")) // must be last option
{
for(i = i + 1; i != end - 1; ++i)
{
}
return;
}
+ else if(!strncmp(a, "--maxspeed=", 11))
+ {
+ maxspeed = atof(a + 11);
+ }
else if(*a == '-')
{
- Con_Printf("invalid option %s\n", a);
- return;
+ Con_Printf("curl: invalid option %s\n", a);
+ // but we ignore the option
}
}
needthefile:
- Curl_Begin_ToFile(url, name, pak, forthismap);
+ Curl_Begin_ToFile(url, maxspeed, name, pak, forthismap);
}
/*
Cvar_RegisterVariable (&cl_curl_maxspeed);
Cvar_RegisterVariable (&sv_curl_defaulturl);
Cvar_RegisterVariable (&sv_curl_serverpackages);
+ Cvar_RegisterVariable (&sv_curl_maxspeed);
Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
//Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
}
for(di = downloads; di; di = di->next)
{
// do not show infobars for background downloads
- if(!developer.integer)
+ if(developer.integer <= 0)
if(di->buffer)
continue;
strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
strlcat(sendbuffer, thispack, sizeof(sendbuffer));
+ if(sv_curl_maxspeed.value > 0)
+ dpsnprintf(sendbuffer + strlen(sendbuffer), sizeof(sendbuffer) - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
strlcat(sendbuffer, " ", sizeof(sendbuffer));