cl_main: Keep old CL_Disconnect for simplicity. Move guts to CL_DisconnectEx
index f97946fe1bd2b365c1fe399d429837574eab03dc..fd3e7a61583a6dbaaa88a6f29a1e8541d7c09553 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -1,12 +1,20 @@
 #include "quakedef.h"
 #include "fs.h"
 #include "libcurl.h"
-
-static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
-static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
-static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
-static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
+#include "thread.h"
+#include "com_list.h"
+#include "image.h"
+#include "jpeg.h"
+#include "image_png.h"
+
+static cvar_t cl_curl_maxdownloads = {CF_CLIENT | CF_ARCHIVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
+static cvar_t cl_curl_maxspeed = {CF_CLIENT | CF_ARCHIVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
+static cvar_t sv_curl_defaulturl = {CF_SERVER | CF_ARCHIVE, "sv_curl_defaulturl","", "default autodownload source URL"};
+static cvar_t sv_curl_serverpackages = {CF_SERVER | CF_ARCHIVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CF_SERVER | CF_ARCHIVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
+static cvar_t cl_curl_enabled = {CF_CLIENT | CF_ARCHIVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
+static cvar_t cl_curl_useragent = {CF_CLIENT, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
+static cvar_t cl_curl_useragent_append = {CF_CLIENT, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
 
 /*
 =================================================================
@@ -21,6 +29,7 @@ static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether clie
 
 typedef struct CURL_s CURL;
 typedef struct CURLM_s CURLM;
+typedef struct curl_slist curl_slist;
 typedef enum
 {
        CURLE_OK = 0
@@ -46,15 +55,24 @@ typedef enum
        CINIT(URL,  OBJECTPOINT, 2),
        CINIT(ERRORBUFFER, OBJECTPOINT, 10),
        CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
+       CINIT(POSTFIELDS, OBJECTPOINT, 15),
        CINIT(REFERER, OBJECTPOINT, 16),
        CINIT(USERAGENT, OBJECTPOINT, 18),
+       CINIT(LOW_SPEED_LIMIT, LONG , 19),
+       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(RESUME_FROM, LONG, 21),
+       CINIT(HTTPHEADER, OBJECTPOINT, 23),
+       CINIT(POST, LONG, 47),         /* HTTP POST method */
        CINIT(FOLLOWLOCATION, LONG, 52),  /* use Location: Luke! */
+       CINIT(POSTFIELDSIZE, LONG, 60),
        CINIT(PRIVATE, OBJECTPOINT, 103),
-       CINIT(LOW_SPEED_LIMIT, LONG , 19),
-       CINIT(LOW_SPEED_TIME, LONG, 20),
+       CINIT(PROTOCOLS, LONG, 181),
+       CINIT(REDIR_PROTOCOLS, LONG, 182)
 }
 CURLoption;
+#define CURLPROTO_HTTP   (1<<0)
+#define CURLPROTO_HTTPS  (1<<1)
+#define CURLPROTO_FTP    (1<<2)
 typedef enum
 {
        CURLINFO_TEXT = 0,
@@ -102,7 +120,7 @@ typedef enum
        CURLINFO_PROXYAUTH_AVAIL  = CURLINFO_LONG   + 24,
        CURLINFO_OS_ERRNO         = CURLINFO_LONG   + 25,
        CURLINFO_NUM_CONNECTS     = CURLINFO_LONG   + 26,
-       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27,
+       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27
 }
 CURLINFO;
 
@@ -128,21 +146,23 @@ typedef struct
 CURLMsg;
 
 static void (*qcurl_global_init) (long flags);
-static void (*qcurl_global_cleanup) ();
+static void (*qcurl_global_cleanup) (void);
 
-static CURL * (*qcurl_easy_init) ();
+static CURL * (*qcurl_easy_init) (void);
 static void (*qcurl_easy_cleanup) (CURL *handle);
 static CURLcode (*qcurl_easy_setopt) (CURL *handle, CURLoption option, ...);
 static CURLcode (*qcurl_easy_getinfo) (CURL *handle, CURLINFO info, ...);
 static const char * (*qcurl_easy_strerror) (CURLcode);
 
-static CURLM * (*qcurl_multi_init) ();
+static CURLM * (*qcurl_multi_init) (void);
 static CURLMcode (*qcurl_multi_perform) (CURLM *multi_handle, int *running_handles);
 static CURLMcode (*qcurl_multi_add_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
 static void (*qcurl_multi_cleanup) (CURLM *);
 static const char * (*qcurl_multi_strerror) (CURLcode);
+static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
+static void (*qcurl_slist_free_all) (curl_slist *list);
 
 static dllfunction_t curlfuncs[] =
 {
@@ -160,6 +180,8 @@ static dllfunction_t curlfuncs[] =
        {"curl_multi_info_read",        (void **) &qcurl_multi_info_read},
        {"curl_multi_cleanup",          (void **) &qcurl_multi_cleanup},
        {"curl_multi_strerror",         (void **) &qcurl_multi_strerror},
+       {"curl_slist_append",           (void **) &qcurl_slist_append},
+       {"curl_slist_free_all",         (void **) &qcurl_slist_free_all},
        {NULL, NULL}
 };
 
@@ -167,30 +189,46 @@ static dllfunction_t curlfuncs[] =
 static dllhandle_t curl_dll = NULL;
 // will be checked at many places to find out if qcurl calls are allowed
 
+#define LOADTYPE_NONE 0
+#define LOADTYPE_PAK 1
+#define LOADTYPE_CACHEPIC 2
+#define LOADTYPE_SKINFRAME 3
+
+void *curl_mutex = NULL;
+
 typedef struct downloadinfo_s
 {
-       char filename[MAX_QPATH];
-       char url[256];
+       char filename[MAX_OSPATH];
+       char url[1024];
        char referer[256];
        qfile_t *stream;
        fs_offset_t startpos;
        CURL *curle;
-       qboolean started;
-       qboolean ispak;
-       unsigned long bytes_received;
-       struct downloadinfo_s *next, *prev;
-       qboolean forthismap;
+       qbool started;
+       int loadtype;
+       size_t bytes_received; // for buffer
+       double bytes_received_curl; // for throttling
+       double bytes_sent_curl; // for throttling
+       llist_t list;
+       qbool forthismap;
+       double maxspeed;
+       curl_slist *slist; // http headers
 
        unsigned char *buffer;
        size_t buffersize;
        curl_callback_t callback;
        void *callback_data;
+
+       const unsigned char *postbuf;
+       size_t postbufsize;
+       const char *post_content_type;
+       const char *extraheaders;
 }
 downloadinfo;
-static downloadinfo *downloads = NULL;
+LIST_HEAD(downloads);
 static int numdownloads = 0;
 
-static qboolean noclear = FALSE;
+static qbool noclear = false;
 
 static int numdownloads_fail = 0;
 static int numdownloads_success = 0;
@@ -207,7 +245,7 @@ all downloads since last server connect ended with a successful status.
 Setting the command to NULL clears it.
 ====================
 */
-void Curl_CommandWhenDone(const char *cmd)
+static void Curl_CommandWhenDone(const char *cmd)
 {
        if(!curl_dll)
                return;
@@ -223,7 +261,7 @@ Do not use yet. Not complete.
 Problem: what counts as an error?
 */
 
-void Curl_CommandWhenError(const char *cmd)
+static void Curl_CommandWhenError(const char *cmd)
 {
        if(!curl_dll)
                return;
@@ -240,18 +278,20 @@ Curl_Clear_forthismap
 Clears the "will disconnect on failure" flags.
 ====================
 */
-void Curl_Clear_forthismap()
+void Curl_Clear_forthismap(void)
 {
        downloadinfo *di;
        if(noclear)
                return;
-       for(di = downloads; di; di = di->next)
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+       List_For_Each_Entry(di, &downloads, list)
                di->forthismap = false;
        Curl_CommandWhenError(NULL);
        Curl_CommandWhenDone(NULL);
        numdownloads_fail = 0;
        numdownloads_success = 0;
        numdownloads_added = 0;
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
@@ -261,15 +301,17 @@ Curl_Have_forthismap
 Returns true if a download needed for the current game is running.
 ====================
 */
-qboolean Curl_Have_forthismap()
+qbool Curl_Have_forthismap(void)
 {
-       return numdownloads_added;
+       return numdownloads_added != 0;
 }
 
-void Curl_Register_predownload()
+void Curl_Register_predownload(void)
 {
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
        Curl_CommandWhenDone("cl_begindownloads");
        Curl_CommandWhenError("cl_begindownloads");
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
@@ -280,24 +322,26 @@ Checks if a "done command" is to be executed.
 All downloads finished, at least one success since connect, no single failure
 -> execute the command.
 */
-static void Curl_CheckCommandWhenDone()
+static void Curl_CheckCommandWhenDone(void)
 {
        if(!curl_dll)
                return;
-       if(numdownloads_added && (numdownloads_success == numdownloads_added) && *command_when_done)
-       {
-               Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
-               Cbuf_AddText("\n");
-               Cbuf_AddText(command_when_done);
-               Cbuf_AddText("\n");
-               Curl_Clear_forthismap();
-       }
-       else if(numdownloads_added && numdownloads_fail && *command_when_error)
+       if(numdownloads_added && ((numdownloads_success + numdownloads_fail) == numdownloads_added))
        {
-               Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
-               Cbuf_AddText("\n");
-               Cbuf_AddText(command_when_error);
-               Cbuf_AddText("\n");
+               if(numdownloads_fail == 0)
+               {
+                       Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
+                       Cbuf_AddText(cmd_local, "\n");
+                       Cbuf_AddText(cmd_local, command_when_done);
+                       Cbuf_AddText(cmd_local, "\n");
+               }
+               else
+               {
+                       Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
+                       Cbuf_AddText(cmd_local, "\n");
+                       Cbuf_AddText(cmd_local, command_when_error);
+                       Cbuf_AddText(cmd_local, "\n");
+               }
                Curl_Clear_forthismap();
        }
 }
@@ -309,13 +353,11 @@ CURL_CloseLibrary
 Load the cURL DLL
 ====================
 */
-static qboolean CURL_OpenLibrary (void)
+static qbool CURL_OpenLibrary (void)
 {
        const char* dllnames [] =
        {
-#if defined(WIN64)
-               "libcurl64.dll",
-#elif defined(WIN32)
+#if defined(WIN32)
                "libcurl-4.dll",
                "libcurl-3.dll",
 #elif defined(MACOSX)
@@ -335,7 +377,7 @@ static qboolean CURL_OpenLibrary (void)
                return true;
 
        // Load the DLL
-       return Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs);
+       return Sys_LoadDependency (dllnames, &curl_dll, curlfuncs);
 }
 
 
@@ -348,12 +390,13 @@ Unload the cURL DLL
 */
 static void CURL_CloseLibrary (void)
 {
-       Sys_UnloadLibrary (&curl_dll);
+       Sys_FreeLibrary (&curl_dll);
 }
 
 
 static CURLM *curlm = NULL;
-static unsigned long bytes_received = 0; // used for bandwidth throttling
+static double bytes_received = 0; // used for bandwidth throttling
+static double bytes_sent = 0; // used for bandwidth throttling
 static double curltime = 0;
 
 /*
@@ -385,10 +428,12 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
                ret = FS_Write(di->stream, data, bytes);
        }
 
-       bytes_received += bytes;
        di->bytes_received += bytes;
 
-       return ret; // why not ret / nmemb?
+       return ret;
+       // Why not ret / nmemb?
+       // Because CURLOPT_WRITEFUNCTION docs say to return the number of bytes.
+       // Yes, this is incompatible with fwrite(3).
 }
 
 typedef enum
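
The return convention noted in the comment above is worth spelling out: libcurl treats any other return value from a CURLOPT_WRITEFUNCTION callback as an error and aborts the transfer. A minimal standalone sketch (plain stdio, not engine code):

/* Sketch of a CURLOPT_WRITEFUNCTION callback: return the number of bytes
 * consumed, i.e. size * nmemb on success, unlike fwrite(3) which returns
 * the number of items written. */
#include <stdio.h>

static size_t sketch_write_cb(void *data, size_t size, size_t nmemb, void *userdata)
{
	FILE *out = (FILE *)userdata;
	size_t bytes = size * nmemb;        /* total payload handed to this call */
	return fwrite(data, 1, bytes, out); /* equals bytes unless the write failed */
}
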
@@ -406,30 +451,53 @@ static void curl_default_callback(int status, size_t length_received, unsigned c
        switch(status)
        {
                case CURLCBSTATUS_OK:
-                       Con_Printf("Download of %s: OK\n", di->filename);
+                       Con_DPrintf("Download of %s: OK\n", di->filename);
                        break;
                case CURLCBSTATUS_FAILED:
-                       Con_Printf("Download of %s: FAILED\n", di->filename);
+                       Con_DPrintf("Download of %s: FAILED\n", di->filename);
                        break;
                case CURLCBSTATUS_ABORTED:
-                       Con_Printf("Download of %s: ABORTED\n", di->filename);
+                       Con_DPrintf("Download of %s: ABORTED\n", di->filename);
                        break;
                case CURLCBSTATUS_SERVERERROR:
-                       Con_Printf("Download of %s: (unknown server error)\n", di->filename);
+                       Con_DPrintf("Download of %s: (unknown server error)\n", di->filename);
                        break;
                case CURLCBSTATUS_UNKNOWN:
-                       Con_Printf("Download of %s: (unknown client error)\n", di->filename);
+                       Con_DPrintf("Download of %s: (unknown client error)\n", di->filename);
                        break;
                default:
-                       Con_Printf("Download of %s: %d\n", di->filename, status);
+                       Con_DPrintf("Download of %s: %d\n", di->filename, status);
                        break;
        }
 }
 
 static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
 {
-       if(developer.integer)
-               curl_default_callback(status, length_received, buffer, cbdata);
+       curl_default_callback(status, length_received, buffer, cbdata);
+}
+
+static unsigned char *decode_image(downloadinfo *di, const char *content_type)
+{
+       unsigned char *pixels = NULL;
+       fs_offset_t filesize = 0;
+       unsigned char *data = FS_LoadFile(di->filename, tempmempool, true, &filesize);
+       if(data)
+       {
+               int mip = 0;
+               if(!strcmp(content_type, "image/jpeg"))
+                       pixels = JPEG_LoadImage_BGRA(data, filesize, &mip);
+               else if(!strcmp(content_type, "image/png"))
+                       pixels = PNG_LoadImage_BGRA(data, filesize, &mip);
+               else if(filesize >= 2 && !strncmp((char *) data, "\xFF\xD8", 2))
+                       pixels = JPEG_LoadImage_BGRA(data, filesize, &mip);
+               else if(filesize >= 7 && !strncmp((char *) data, "\x89PNG\x0D\x0A\x1A\x0A", 7))
+                       pixels = PNG_LoadImage_BGRA(data, filesize, &mip);
+               else
+                       Con_Printf("Did not detect content type: %s\n", content_type);
+               Mem_Free(data);
+       }
+       // do we call Image_MakeLinearColorsFromsRGB or not?
+       return pixels;
 }
 
 /*
@@ -441,9 +509,11 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
-static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
+static qbool Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qbool forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error, const char *content_type_)
 {
-       qboolean ok = false;
+       char content_type[64];
+       qbool ok = false;
        if(!curl_dll)
                return;
        switch(status)
@@ -474,14 +544,20 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
                                di->callback(CURLCBSTATUS_UNKNOWN, di->bytes_received, di->buffer, di->callback_data);
                        break;
        }
+       if(content_type_)
+               strlcpy(content_type, content_type_, sizeof(content_type));
+       else
+               *content_type = 0;
 
        if(di->curle)
        {
                qcurl_multi_remove_handle(curlm, di->curle);
                qcurl_easy_cleanup(di->curle);
+               if(di->slist)
+                       qcurl_slist_free_all(di->slist);
        }
 
-       if(ok && !di->bytes_received)
+       if(!di->callback && ok && !di->bytes_received)
        {
                Con_Printf("ERROR: empty file\n");
                ok = false;
@@ -490,15 +566,61 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        if(di->stream)
                FS_Close(di->stream);
 
-       if(ok && di->ispak)
+#define CLEAR_AND_RETRY() \
+       do \
+       { \
+               di->stream = FS_OpenRealFile(di->filename, "wb", false); \
+               FS_Close(di->stream); \
+               if(di->startpos && !di->callback) \
+               { \
+                       Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->loadtype, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL); \
+                       di->forthismap = false; \
+               } \
+       } \
+       while(0)
+
+       if(ok && di->loadtype == LOADTYPE_PAK)
+       {
                ok = FS_AddPack(di->filename, NULL, true);
+               if(!ok)
+                       CLEAR_AND_RETRY();
+       }
+       else if(ok && di->loadtype == LOADTYPE_CACHEPIC)
+       {
+               const char *p;
+               unsigned char *pixels = NULL;
 
-       if(di->prev)
-               di->prev->next = di->next;
-       else
-               downloads = di->next;
-       if(di->next)
-               di->next->prev = di->prev;
+               p = di->filename;
+#ifdef WE_ARE_EVIL
+               if(!strncmp(p, "dlcache/", 8))
+                       p += 8;
+#endif
+
+               pixels = decode_image(di, content_type);
+               if(pixels)
+                       Draw_NewPic(p, image_width, image_height, pixels, TEXTYPE_BGRA, TEXF_ALPHA | TEXF_CLAMP);
+               else
+                       CLEAR_AND_RETRY();
+       }
+       else if(ok && di->loadtype == LOADTYPE_SKINFRAME)
+       {
+               const char *p;
+               unsigned char *pixels = NULL;
+
+               p = di->filename;
+#ifdef WE_ARE_EVIL
+               if(!strncmp(p, "dlcache/", 8))
+                       p += 8;
+#endif
+
+               pixels = decode_image(di, content_type);
+               if(pixels)
+                       R_SkinFrame_LoadInternalBGRA(p, TEXF_FORCE_RELOAD | TEXF_MIPMAP | TEXF_ALPHA, pixels, image_width, image_height, 0, 0, 0, false); // TODO what sRGB argument to put here?
+               else
+                       CLEAR_AND_RETRY();
+       }
+
+       List_Delete(&di->list);
 
        --numdownloads;
        if(di->forthismap)
@@ -511,6 +633,36 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        Z_Free(di);
 }
 
+/*
+====================
+CleanURL
+
+Returns a "cleaned up" URL for display (to strip login data)
+====================
+*/
+static const char *CleanURL(const char *url, char *urlbuf, size_t urlbuflength)
+{
+       const char *p, *q, *r;
+
+       // if URL is of form anything://foo-without-slash@rest, replace by anything://rest
+       p = strstr(url, "://");
+       if(p)
+       {
+               q = strchr(p + 3, '@');
+               if(q)
+               {
+                       r = strchr(p + 3, '/');
+                       if(!r || q < r)
+                       {
+                               dpsnprintf(urlbuf, urlbuflength, "%.*s%s", (int)(p - url + 3), url, q + 1);
+                               return urlbuf;
+                       }
+               }
+       }
+
+       return url;
+}
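
A quick illustration of the stripping, assuming the output buffer is large enough:

	/* Credentials before the host are dropped for display purposes only;
	 * the download itself still uses the original URL. */
	char urlbuf[1024];
	const char *shown = CleanURL("ftp://user:secret@example.com/maps/foo.pk3", urlbuf, sizeof(urlbuf));
	/* shown == urlbuf, containing "ftp://example.com/maps/foo.pk3";
	 * a URL without an '@' ahead of the first '/' is returned unchanged. */
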
+
 /*
 ====================
 CheckPendingDownloads
@@ -520,26 +672,29 @@ To not start too many downloads at once, only one download is added at a time,
 up to a maximum number of cl_curl_maxdownloads are running.
 ====================
 */
-static void CheckPendingDownloads()
+static void CheckPendingDownloads(void)
 {
+       const char *h;
+       char urlbuf[1024];
+       char vabuf[1024];
        if(!curl_dll)
                return;
        if(numdownloads < cl_curl_maxdownloads.integer)
        {
                downloadinfo *di;
-               for(di = downloads; di; di = di->next)
+               List_For_Each_Entry(di, &downloads, list)
                {
                        if(!di->started)
                        {
                                if(!di->buffer)
                                {
-                                       Con_Printf("Downloading %s -> %s", di->url, di->filename);
+                                       Con_Printf("Downloading %s -> %s", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
 
                                        di->stream = FS_OpenRealFile(di->filename, "ab", false);
                                        if(!di->stream)
                                        {
                                                Con_Printf("\nFAILED: Could not open output file %s\n", di->filename);
-                                               Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK);
+                                               Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK, NULL);
                                                return;
                                        }
                                        FS_Seek(di->stream, 0, SEEK_END);
@@ -551,13 +706,34 @@ static void CheckPendingDownloads()
                                }
                                else
                                {
-                                       Con_DPrintf("Downloading %s -> memory\n", di->url);
+                                       Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url, urlbuf, sizeof(urlbuf)));
                                        di->startpos = 0;
                                }
 
                                di->curle = qcurl_easy_init();
+                               di->slist = NULL;
                                qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
-                               qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
+                               if(cl_curl_useragent.integer)
+                               {
+                                       const char *ua
+#ifdef HTTP_USER_AGENT
+                                               = HTTP_USER_AGENT;
+#else
+                                               = engineversion;
+#endif
+                                       if(!ua)
+                                               ua = "";
+                                       if(*cl_curl_useragent_append.string)
+                                               ua = va(vabuf, sizeof(vabuf), "%s%s%s",
+                                                       ua,
+                                                       (ua[0] && ua[strlen(ua)-1] != ' ')
+                                                               ? " "
+                                                               : "",
+                                                       cl_curl_useragent_append.string);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ua);
+                               }
+                               else
+                                       qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, "");
                                qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
                                qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos);
                                qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1);
@@ -566,6 +742,43 @@ static void CheckPendingDownloads()
                                qcurl_easy_setopt(di->curle, CURLOPT_LOW_SPEED_TIME, (long) 45);
                                qcurl_easy_setopt(di->curle, CURLOPT_WRITEDATA, (void *) di);
                                qcurl_easy_setopt(di->curle, CURLOPT_PRIVATE, (void *) di);
+                               qcurl_easy_setopt(di->curle, CURLOPT_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP);
+                               if(qcurl_easy_setopt(di->curle, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP) != CURLE_OK)
+                               {
+                                       Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
+                                       //qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
+                               }
+                               if(di->post_content_type)
+                               {
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
+                                       di->slist = qcurl_slist_append(di->slist, va(vabuf, sizeof(vabuf), "Content-Type: %s", di->post_content_type));
+                               }
+
+                               // parse extra headers into slist
+                               // \n separated list!
+                               h = di->extraheaders;
+                               while(h)
+                               {
+                                       const char *hh = strchr(h, '\n');
+                                       if(hh)
+                                       {
+                                               char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
+                                               memcpy(buf, h, hh - h);
+                                               buf[hh - h] = 0;
+                                               di->slist = qcurl_slist_append(di->slist, buf);
+                                               h = hh + 1;
+                                       }
+                                       else
+                                       {
+                                               di->slist = qcurl_slist_append(di->slist, h);
+                                               h = NULL;
+                                       }
+                               }
+
+                               qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
+                               
                                qcurl_multi_add_handle(curlm, di->curle);
                                di->started = true;
                                ++numdownloads;
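
The extraheaders string parsed above is simply a newline-separated list: each entry becomes one qcurl_slist_append() call and therefore one extra header line on the request. An illustrative value (header names invented):

	/* Hypothetical extraheaders value producing two additional request headers. */
	const char *extraheaders =
		"X-Example-Token: 123456\n"
		"X-Example-Mod: mymod 1.0";
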
@@ -584,11 +797,12 @@ this function MUST be called before using anything else in this file.
 On Win32, this must be called AFTER WSAStartup has been done!
 ====================
 */
-void Curl_Init()
+void Curl_Init(void)
 {
        CURL_OpenLibrary();
        if(!curl_dll)
                return;
+       if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex();
        qcurl_global_init(CURL_GLOBAL_NOTHING);
        curlm = qcurl_multi_init();
 }
@@ -600,13 +814,14 @@ Curl_Shutdown
 Surprise... closes all the stuff. Please do this BEFORE shutting down LHNET.
 ====================
 */
-void Curl_ClearRequirements();
-void Curl_Shutdown()
+void Curl_ClearRequirements(void);
+void Curl_Shutdown(void)
 {
        if(!curl_dll)
                return;
        Curl_ClearRequirements();
        Curl_CancelAll();
+       if (curl_mutex) Thread_DestroyMutex(curl_mutex);
        CURL_CloseLibrary();
        curl_dll = NULL;
 }
@@ -623,7 +838,7 @@ static downloadinfo *Curl_Find(const char *filename)
        downloadinfo *di;
        if(!curl_dll)
                return NULL;
-       for(di = downloads; di; di = di->next)
+       List_For_Each_Entry(di, &downloads, list)
                if(!strcasecmp(di->filename, filename))
                        return di;
        return NULL;
@@ -631,19 +846,16 @@ static downloadinfo *Curl_Find(const char *filename)
 
 void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata)
 {
-       downloadinfo *di;
+       downloadinfo *di, *ndi;
        if(!curl_dll)
                return;
-       for(di = downloads; di; )
+       List_For_Each_Entry_Safe(di, ndi, &downloads, list)
        {
                if(di->callback == callback && di->callback_data == cbdata)
                {
                        di->callback = curl_quiet_callback; // do NOT call the callback
-                       Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
-                       di = downloads;
+                       Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
                }
-               else
-                       di = di->next;
        }
 }
 
@@ -655,19 +867,44 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qbool Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qbool forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       if(!curl_dll)
+       if(buf)
+               if(loadtype != LOADTYPE_NONE)
+                       Host_Error("Curl_Begin: loadtype and buffer are both set");
+
+       if(!curl_dll || !cl_curl_enabled.integer)
        {
                return false;
        }
        else
        {
-               char fn[MAX_QPATH];
+               char fn[MAX_OSPATH];
+               char urlbuf[1024];
                const char *p, *q;
                size_t length;
                downloadinfo *di;
 
+               // if URL is protocol:///* or protocol://:port/*, insert the IP of the current server
+               p = strchr(URL, ':');
+               if(p)
+               {
+                       if(!strncmp(p, ":///", 4) || !strncmp(p, "://:", 4))
+                       {
+                               char addressstring[128];
+                               *addressstring = 0;
+                               InfoString_GetValue(cls.userinfo, "*ip", addressstring, sizeof(addressstring));
+                               q = strchr(addressstring, ':');
+                               if(!q)
+                                       q = addressstring + strlen(addressstring);
+                               if(*addressstring)
+                               {
+                                       dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s://%.*s%s", (int) (p - URL), URL, (int) (q - addressstring), addressstring, URL + (p - URL) + 3);
+                                       URL = urlbuf;
+                               }
+                       }
+               }
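
For example, assuming the client's userinfo carries *ip "192.0.2.7:26000", a server-supplied URL with an empty host part is rewritten before the transfer starts (the game server's own port is discarded, only its address is inserted):

	/* "http:///maps/foo.pk3"      becomes "http://192.0.2.7/maps/foo.pk3"
	   "http://:8080/maps/foo.pk3" becomes "http://192.0.2.7:8080/maps/foo.pk3" */
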
+
                // Note: This extraction of the file name portion is NOT entirely correct.
                //
                // It does the following:
@@ -694,81 +931,105 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                //
                //   141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
 
-               if(!name)
-                       name = URL;
+               if (curl_mutex) Thread_LockMutex(curl_mutex);
 
-               if(!buf)
+               if(buf)
                {
-                       p = strrchr(name, '/');
-                       p = p ? (p+1) : name;
-                       q = strchr(p, '?');
-                       length = q ? (size_t)(q - p) : strlen(p);
-                       dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
+                       if(!name)
+                               name = CleanURL(URL, urlbuf, sizeof(urlbuf));
+               }
+               else
+               {
+                       if(!name)
+                       {
+                               name = CleanURL(URL, urlbuf, sizeof(urlbuf));
+                               p = strrchr(name, '/');
+                               p = p ? (p+1) : name;
+                               q = strchr(p, '?');
+                               length = q ? (size_t)(q - p) : strlen(p);
+                               dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
+                       }
+                       else
+                       {
+                               dpsnprintf(fn, sizeof(fn), "dlcache/%s", name);
+                       }
 
                        name = fn; // make it point back
 
                        // already downloading the file?
                        {
-                               downloadinfo *di = Curl_Find(fn);
-                               if(di)
+                               downloadinfo *existingdownloadinfo = Curl_Find(fn);
+                               if(existingdownloadinfo)
                                {
-                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(existingdownloadinfo->url, urlbuf, sizeof(urlbuf)));
 
                                        // however, if it was not for this map yet...
-                                       if(forthismap && !di->forthismap)
+                                       if(forthismap && !existingdownloadinfo->forthismap)
                                        {
-                                               di->forthismap = true;
+                                               existingdownloadinfo->forthismap = true;
                                                // this "fakes" a download attempt so the client will wait for
                                                // the download to finish and then reconnect
                                                ++numdownloads_added;
                                        }
 
+                                       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                                        return false;
                                }
                        }
 
-                       if(ispak && FS_FileExists(fn))
+                       if(FS_FileExists(fn))
                        {
-                               qboolean already_loaded;
-                               if(FS_AddPack(fn, &already_loaded, true))
+                               if(loadtype == LOADTYPE_PAK)
                                {
-                                       Con_DPrintf("%s already exists, not downloading!\n", fn);
-                                       if(already_loaded)
-                                               Con_DPrintf("(pak was already loaded)\n");
-                                       else
+                                       qbool already_loaded;
+                                       if(FS_AddPack(fn, &already_loaded, true))
                                        {
-                                               if(forthismap)
+                                               Con_DPrintf("%s already exists, not downloading!\n", fn);
+                                               if(already_loaded)
+                                                       Con_DPrintf("(pak was already loaded)\n");
+                                               else
                                                {
-                                                       ++numdownloads_added;
-                                                       ++numdownloads_success;
+                                                       if(forthismap)
+                                                       {
+                                                               ++numdownloads_added;
+                                                               ++numdownloads_success;
+                                                       }
                                                }
-                                       }
 
-                                       return false;
-                               }
-                               else
-                               {
-                                       qfile_t *f = FS_OpenVirtualFile(fn, false);
-                                       if(f)
+                                               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
+                                               return false;
+                                       }
+                                       else
                                        {
-                                               char buf[4] = {0};
-                                               FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
-
-                                               if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
+                                               qfile_t *f = FS_OpenRealFile(fn, "rb", false);
+                                               if(f)
                                                {
-                                                       Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
-                                                       FS_Close(f);
-                                                       f = FS_OpenRealFile(fn, "wb", false);
-                                                       if(f)
+                                                       char b[4] = {0};
+                                                       FS_Read(f, b, sizeof(b)); // no "-1", I will use memcmp
+
+                                                       if(memcmp(b, "PK\x03\x04", 4) && memcmp(b, "PACK", 4))
+                                                       {
+                                                               Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
                                                                FS_Close(f);
-                                               }
-                                               else
-                                               {
-                                                       // OK
-                                                       FS_Close(f);
+                                                               f = FS_OpenRealFile(fn, "wb", false);
+                                                               if(f)
+                                                                       FS_Close(f);
+                                                       }
+                                                       else
+                                                       {
+                                                               // OK
+                                                               FS_Close(f);
+                                                       }
                                                }
                                        }
                                }
+                               else
+                               {
+                                       // never resume these
+                                       qfile_t *f = FS_OpenRealFile(fn, "wb", false);
+                                       if(f)
+                                               FS_Close(f);
+                               }
                        }
                }
 
@@ -776,7 +1037,8 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                // URL scheme (so one can't read local files using file://)
                if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
                {
-                       Con_Printf("Curl_Begin(\"%s\")): nasty URL scheme rejected\n", URL);
+                       Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL);
+                       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                        return false;
                }
 
@@ -791,13 +1053,12 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                di->startpos = 0;
                di->curle = NULL;
                di->started = false;
-               di->ispak = ispak;
+               di->loadtype = loadtype;
+               di->maxspeed = maxspeed;
                di->bytes_received = 0;
-               di->next = downloads;
-               di->prev = NULL;
-               if(di->next)
-                       di->next->prev = di;
-
+               di->bytes_received_curl = 0;
+               di->bytes_sent_curl = 0;
+               di->extraheaders = extraheaders;
                di->buffer = buf;
                di->buffersize = bufsize;
                if(callback == NULL)
@@ -811,31 +1072,55 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                        di->callback_data = cbdata;
                }
 
-               downloads = di;
+               if(post_content_type)
+               {
+                       di->post_content_type = post_content_type;
+                       di->postbuf = postbuf;
+                       di->postbufsize = postbufsize;
+               }
+               else
+               {
+                       di->post_content_type = NULL;
+                       di->postbuf = NULL;
+                       di->postbufsize = 0;
+               }
+
+               List_Add(&di->list, &downloads);
+
+               if (curl_mutex)
+                       Thread_UnlockMutex(curl_mutex);
+
                return true;
        }
 }
 
-qboolean Curl_Begin_ToFile(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+qbool Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, int loadtype, qbool forthismap)
 {
-       return Curl_Begin(URL, name, ispak, forthismap, NULL, 0, NULL, NULL);
+       return Curl_Begin(URL, NULL, maxspeed, name, loadtype, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
 }
-qboolean Curl_Begin_ToMemory(const char *URL, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qbool Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       return Curl_Begin(URL, NULL, false, false, buf, bufsize, callback, cbdata);
+       return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
+}
+qbool Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+{
+       return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
 }
 
 /*
 ====================
-Curl_Run
+Curl_Frame
 
 call this regularly as this will always download as much as possible without
 blocking.
 ====================
 */
-void Curl_Run()
+void Curl_Frame(void)
 {
-       noclear = FALSE;
+       double maxspeed;
+       downloadinfo *di;
+
+       noclear = false;
 
        if(!cl_curl_enabled.integer)
                return;
@@ -843,13 +1128,21 @@ void Curl_Run()
        if(!curl_dll)
                return;
 
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
        Curl_CheckCommandWhenDone();
 
-       if(!downloads)
+       if(List_Is_Empty(&downloads))
+       {
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                return;
+       }
 
-       if(realtime < curltime) // throttle
+       if(host.realtime < curltime) // throttle
+       {
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                return;
+       }
 
        {
                int remaining;
@@ -861,6 +1154,20 @@ void Curl_Run()
                }
                while(mc == CURLM_CALL_MULTI_PERFORM);
 
+               List_For_Each_Entry(di, &downloads, list)
+               {
+                       double b = 0;
+                       if(di->curle)
+                       {
+                               qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+                               bytes_sent += (b - di->bytes_sent_curl);
+                               di->bytes_sent_curl = b;
+                               qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+                               bytes_received += (b - di->bytes_received_curl);
+                               di->bytes_received_curl = b;
+                       }
+               }
+
                for(;;)
                {
                        CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
@@ -868,7 +1175,7 @@ void Curl_Run()
                                break;
                        if(msg->msg == CURLMSG_DONE)
                        {
-                               downloadinfo *di;
+                               const char *ct = NULL;
                                CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
                                CURLcode result;
                                qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
@@ -886,12 +1193,13 @@ void Curl_Run()
                                                case 4: // e.g. 404?
                                                case 5: // e.g. 500?
                                                        failed = CURL_DOWNLOAD_SERVERERROR;
-                                                       result = code;
+                                                       result = (CURLcode) code;
                                                        break;
                                        }
+                                       qcurl_easy_getinfo(msg->easy_handle, CURLINFO_CONTENT_TYPE, &ct);
                                }
 
-                               Curl_EndDownload(di, failed, result);
+                               Curl_EndDownload(di, failed, result, ct);
                        }
                }
        }
@@ -901,14 +1209,26 @@ void Curl_Run()
        // when will we curl the next time?
        // we will wait a bit to ensure our download rate is kept.
        // we now know that realtime >= curltime... so set up a new curltime
-       if(cl_curl_maxspeed.value > 0)
+
+       // derive the effective maxspeed from the most restrictive per-download
+       // limit... this CAN be done better, but maybe later
+       maxspeed = cl_curl_maxspeed.value;
+       List_For_Each_Entry(di, &downloads, list)
+               if(di->maxspeed > 0)
+                       if(di->maxspeed < maxspeed || maxspeed <= 0)
+                               maxspeed = di->maxspeed;
+
+       if(maxspeed > 0)
        {
-               unsigned long bytes = bytes_received; // maybe smoothen a bit?
-               curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
-               bytes_received -= bytes;
+               double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
+               curltime = host.realtime + bytes / (maxspeed * 1024.0);
+               bytes_sent = 0;
+               bytes_received = 0;
        }
        else
-               curltime = realtime;
+               curltime = host.realtime;
+
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
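
Concretely, the throttle only pushes the next poll into the future by bytes / (maxspeed * 1024) seconds; a worked example:

	/* maxspeed = 300 (KiB/s) and 614400 bytes moved since the last poll:
	   curltime = host.realtime + 614400 / (300 * 1024.0)
	            = host.realtime + 2.0 seconds of enforced idle time. */
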
 
 /*
@@ -918,31 +1238,35 @@ Curl_CancelAll
 Stops ALL downloads.
 ====================
 */
-void Curl_CancelAll()
+void Curl_CancelAll(void)
 {
        if(!curl_dll)
                return;
 
-       while(downloads)
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
+       while(!List_Is_Empty(&downloads))
        {
-               Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK);
+               Curl_EndDownload(List_First_Entry(&downloads, downloadinfo, list), CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
                // INVARIANT: the first entry of the downloads list will then be the next download!
        }
+
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
 ====================
 Curl_Running
 
-returns true iff there is a download running.
+returns true if there is a download running.
 ====================
 */
-qboolean Curl_Running()
+qbool Curl_Running(void)
 {
        if(!curl_dll)
                return false;
 
-       return downloads != NULL;
+       return !List_Is_Empty(&downloads);
 }
 
 /*
@@ -962,7 +1286,7 @@ static double Curl_GetDownloadAmount(downloadinfo *di)
                double length;
                qcurl_easy_getinfo(di->curle, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &length);
                if(length > 0)
-                       return di->bytes_received / length;
+                       return (di->startpos + di->bytes_received) / (di->startpos + length);
                else
                        return 0;
        }
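
The added startpos terms make a resumed download report overall progress instead of progress within the remaining part; a worked example:

	/* A 1000000-byte file resumed at startpos = 250000, with another 250000
	   bytes received in this session; CURLINFO_CONTENT_LENGTH_DOWNLOAD then
	   reports the remaining 750000 bytes:
	       (250000 + 250000) / (250000 + 750000) = 0.50  (50% overall)
	   whereas the old formula gave 250000 / 750000 = 0.33. */
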
@@ -999,18 +1323,20 @@ prints the download list
 ====================
 */
 // TODO rewrite using Curl_GetDownloadInfo?
-static void Curl_Info_f()
+static void Curl_Info_f(cmd_state_t *cmd)
 {
        downloadinfo *di;
+       char urlbuf[1024];
        if(!curl_dll)
                return;
        if(Curl_Running())
        {
+               if (curl_mutex) Thread_LockMutex(curl_mutex);
                Con_Print("Currently running downloads:\n");
-               for(di = downloads; di; di = di->next)
+               List_For_Each_Entry(di, &downloads, list)
                {
                        double speed, percent;
-                       Con_Printf("  %s -> %s ",  di->url, di->filename);
+                       Con_Printf("  %s -> %s ",  CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
                        percent = 100.0 * Curl_GetDownloadAmount(di);
                        speed = Curl_GetDownloadSpeed(di);
                        if(percent >= 0)
@@ -1018,6 +1344,7 @@ static void Curl_Info_f()
                        else
                                Con_Print("(queued)\n");
                }
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
        }
        else
        {
@@ -1051,12 +1378,13 @@ curl --finish_autodownload
        once the last download completes successfully, reconnect to the current server
 ====================
 */
-void Curl_Curl_f(void)
+static void Curl_Curl_f(cmd_state_t *cmd)
 {
+       double maxspeed = 0;
        int i;
        int end;
-       qboolean pak = false;
-       qboolean forthismap = false;
+       int loadtype = LOADTYPE_NONE;
+       qbool forthismap = false;
        const char *url;
        const char *name = 0;
 
@@ -1072,25 +1400,21 @@ void Curl_Curl_f(void)
                return;
        }
 
-       for(i = 0; i != Cmd_Argc(); ++i)
-               Con_DPrintf("%s ", Cmd_Argv(i));
-       Con_DPrint("\n");
-
-       if(Cmd_Argc() < 2)
+       if(Cmd_Argc(cmd) < 2)
        {
                Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
                return;
        }
 
-       url = Cmd_Argv(Cmd_Argc() - 1);
-       end = Cmd_Argc();
+       url = Cmd_Argv(cmd, Cmd_Argc(cmd) - 1);
+       end = Cmd_Argc(cmd);
 
        for(i = 1; i != end; ++i)
        {
-               const char *a = Cmd_Argv(i);
+               const char *a = Cmd_Argv(cmd, i);
                if(!strcmp(a, "--info"))
                {
-                       Curl_Info_f();
+                       Curl_Info_f(cmd);
                        return;
                }
                else if(!strcmp(a, "--cancel"))
@@ -1101,7 +1425,7 @@ void Curl_Curl_f(void)
                        {
                                downloadinfo *di = Curl_Find(url);
                                if(di)
-                                       Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
+                                       Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
                                else
                                        Con_Print("download not found\n");
                        }
@@ -1109,13 +1433,21 @@ void Curl_Curl_f(void)
                }
                else if(!strcmp(a, "--pak"))
                {
-                       pak = true;
+                       loadtype = LOADTYPE_PAK;
                }
-               else if(!strcmp(a, "--for"))
+               else if(!strcmp(a, "--cachepic"))
+               {
+                       loadtype = LOADTYPE_CACHEPIC;
+               }
+               else if(!strcmp(a, "--skinframe"))
+               {
+                       loadtype = LOADTYPE_SKINFRAME;
+               }
+               else if(!strcmp(a, "--for")) // must be last option
                {
                        for(i = i + 1; i != end - 1; ++i)
                        {
-                               if(!FS_FileExists(Cmd_Argv(i)))
+                               if(!FS_FileExists(Cmd_Argv(cmd, i)))
                                        goto needthefile; // why can't I have a "double break"?
                        }
                        // if we get here, we have all the files...
@@ -1130,7 +1462,7 @@ void Curl_Curl_f(void)
                        if(i < end - 1)
                        {
                                ++i;
-                               name = Cmd_Argv(i);
+                               name = Cmd_Argv(cmd, i);
                        }
                }
                else if(!strcmp(a, "--clear_autodownload"))
@@ -1151,9 +1483,9 @@ void Curl_Curl_f(void)
                                        {
                                                dpsnprintf(donecommand, sizeof(donecommand), "connect %s", cls.netcon->address);
                                                Curl_CommandWhenDone(donecommand);
-                                               noclear = TRUE;
+                                               noclear = true;
                                                CL_Disconnect();
-                                               noclear = FALSE;
+                                               noclear = false;
                                                Curl_CheckCommandWhenDone();
                                        }
                                        else
@@ -1162,15 +1494,19 @@ void Curl_Curl_f(void)
                        }
                        return;
                }
+               else if(!strncmp(a, "--maxspeed=", 11))
+               {
+                       maxspeed = atof(a + 11);
+               }
                else if(*a == '-')
                {
-                       Con_Printf("invalid option %s\n", a);
-                       return;
+                       Con_Printf("curl: invalid option %s\n", a);
+                       // but we ignore the option
                }
        }
 
 needthefile:
-       Curl_Begin_ToFile(url, name, pak, forthismap);
+       Curl_Begin_ToFile(url, maxspeed, name, loadtype, forthismap);
 }
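
Putting the options shown above together, a few illustrative console invocations (URL and file names invented; --for makes the download conditional on one of the listed files being missing):

	curl --info
	curl --pak --for maps/mymap.bsp http://example.com/mymap.pk3
	curl --maxspeed=150 --skinframe http://example.com/players/skin.png
	curl --finish_autodownload
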
 
 /*
@@ -1180,10 +1516,10 @@ static void curl_curlcat_callback(int code, size_t length_received, unsigned cha
        Z_Free(buffer);
 }
 
-void Curl_CurlCat_f(void)
+void Curl_CurlCat_f(cmd_state_t *cmd)
 {
        unsigned char *buf;
-       const char *url = Cmd_Argv(1);
+       const char *url = Cmd_Argv(cmd, 1);
        buf = Z_Malloc(16384);
        Curl_Begin_ToMemory(url, buf, 16384, curl_curlcat_callback, NULL);
 }
@@ -1203,8 +1539,11 @@ void Curl_Init_Commands(void)
        Cvar_RegisterVariable (&cl_curl_maxspeed);
        Cvar_RegisterVariable (&sv_curl_defaulturl);
        Cvar_RegisterVariable (&sv_curl_serverpackages);
-       Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
-       //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
+       Cvar_RegisterVariable (&sv_curl_maxspeed);
+       Cvar_RegisterVariable (&cl_curl_useragent);
+       Cvar_RegisterVariable (&cl_curl_useragent_append);
+       Cmd_AddCommand(CF_CLIENT | CF_CLIENT_FROM_SERVER, "curl", Curl_Curl_f, "download data from an URL and add to search path");
+       //Cmd_AddCommand(cmd_local, "curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
 
 /*
@@ -1218,12 +1557,11 @@ information, or to NULL if no such display shall occur. The returned
 array must be freed later using Z_Free.
 ====================
 */
-Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info)
+Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info, char *addinfo, size_t addinfolength)
 {
        int i;
        downloadinfo *di;
        Curl_downloadinfo_t *downinfo;
-       static char addinfo[128];
 
        if(!curl_dll)
        {
@@ -1233,16 +1571,18 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
                return NULL;
        }
 
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
        i = 0;
-       for(di = downloads; di; di = di->next)
+       List_For_Each_Entry(di, &downloads, list)
                ++i;
 
        downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * i);
        i = 0;
-       for(di = downloads; di; di = di->next)
+       List_For_Each_Entry(di, &downloads, list)
        {
                // do not show infobars for background downloads
-               if(!developer.integer)
+               if(developer.integer <= 0)
                        if(di->buffer)
                                continue;
                strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
@@ -1265,11 +1605,11 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
                if(*command_when_done && !numdownloads_fail && numdownloads_added)
                {
                        if(!strncmp(command_when_done, "connect ", 8))
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will join %s when done)", command_when_done + 8);
+                               dpsnprintf(addinfo, addinfolength, "(will join %s when done)", command_when_done + 8);
                        else if(!strcmp(command_when_done, "cl_begindownloads"))
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will enter the game when done)");
+                               dpsnprintf(addinfo, addinfolength, "(will enter the game when done)");
                        else
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will do '%s' when done)", command_when_done);
+                               dpsnprintf(addinfo, addinfolength, "(will do '%s' when done)", command_when_done);
                        *additional_info = addinfo;
                }
                else
@@ -1277,6 +1617,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
        }
 
        *nDownloads = i;
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
        return downinfo;
 }
 
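A minimal caller sketch for the new Curl_GetDownloadInfo signature, in which the caller now supplies the addinfo buffer instead of the old static one. Only the filename field of Curl_downloadinfo_t is relied on here; identifiers such as nDown are illustrative, and the returned array is released with Z_Free as the comment above requires:

	{
		char addinfo[128];
		const char *additional_info = NULL;
		int nDown, j;
		Curl_downloadinfo_t *infos = Curl_GetDownloadInfo(&nDown, &additional_info, addinfo, sizeof(addinfo));
		if(infos)
		{
			for(j = 0; j < nDown; ++j)
				Con_Printf("downloading: %s\n", infos[j].filename);
			if(additional_info)
				Con_Printf("%s\n", additional_info); // e.g. "(will join ... when done)"
			Z_Free(infos); // caller owns the array
		}
	}
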
@@ -1307,7 +1648,7 @@ this file for obvious reasons.
 */
 static const char *Curl_FindPackURL(const char *filename)
 {
-       static char foundurl[256];
+       static char foundurl[1024]; // invoked only by server
        fs_offset_t filesize;
        char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
        if(buf && filesize)
@@ -1315,7 +1656,7 @@ static const char *Curl_FindPackURL(const char *filename)
                // read lines of format "pattern url"
                char *p = buf;
                char *pattern = NULL, *patternend = NULL, *url = NULL, *urlend = NULL;
-               qboolean eof = false;
+               qbool eof = false;
 
                pattern = p;
                while(!eof)
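To illustrate the "pattern url" lines parsed above, a hypothetical curl_urls.txt; the pack names and URLs are invented, "-" as the URL suppresses autodownload of matching packs (see the strcmp(packurl, "-") check in Curl_SendRequirement below), and wildcard matching of the pattern against the pack file name is assumed:

	data*.pk3      -
	mymaps*.pk3    http://example.com/mymaps/
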
@@ -1370,7 +1711,7 @@ static const char *Curl_FindPackURL(const char *filename)
 typedef struct requirement_s
 {
        struct requirement_s *next;
-       char filename[MAX_QPATH];
+       char filename[MAX_OSPATH];
 }
 requirement;
 static requirement *requirements = NULL;
@@ -1399,22 +1740,14 @@ Clears the list of required files for playing on the current map.
 This should be called at every map change.
 ====================
 */
-void Curl_ClearRequirements()
+void Curl_ClearRequirements(void)
 {
-       const char *p;
        while(requirements)
        {
                requirement *req = requirements;
                requirements = requirements->next;
                Z_Free(req);
        }
-       p = sv_curl_serverpackages.string;
-       Con_DPrintf("Require all of: %s\n", p);
-       while(COM_ParseToken_Simple(&p, false, false))
-       {
-               Con_DPrintf("Require: %s\n", com_token);
-               Curl_RequireFile(com_token);
-       }
 }
 
 /*
@@ -1429,51 +1762,62 @@ This is done by sending him the following console commands:
        curl --finish_autodownload
 ====================
 */
-void Curl_SendRequirements()
+static qbool Curl_SendRequirement(const char *filename, qbool foundone, char *sendbuffer, size_t sendbuffer_len)
 {
-       // for each requirement, find the pack name
-       char sendbuffer[4096] = "";
-       requirement *req;
-       qboolean foundone = false;
+       const char *p;
+       const char *thispack = FS_WhichPack(filename);
+       const char *packurl;
 
-       for(req = requirements; req; req = req->next)
-       {
-               const char *p;
-               const char *thispack = FS_WhichPack(req->filename);
-               const char *packurl;
+       if(!thispack || !*thispack)
+               return false;
 
-               if(!thispack)
-                       continue;
+       p = strrchr(thispack, '/');
+       if(p)
+               thispack = p + 1;
 
-               p = strrchr(thispack, '/');
-               if(p)
-                       thispack = p + 1;
+       packurl = Curl_FindPackURL(thispack);
 
-               packurl = Curl_FindPackURL(thispack);
+       if(packurl && *packurl && strcmp(packurl, "-"))
+       {
+               if(!foundone)
+                       strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+
+               strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               if(sv_curl_maxspeed.value > 0)
+                       dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
+               strlcat(sendbuffer, " --for ", sendbuffer_len);
+               strlcat(sendbuffer, filename, sendbuffer_len);
+               strlcat(sendbuffer, " ", sendbuffer_len);
+               strlcat(sendbuffer, packurl, sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               strlcat(sendbuffer, "\n", sendbuffer_len);
 
-               if(packurl && *packurl && strcmp(packurl, "-"))
-               {
-                       if(!foundone)
-                               strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
-
-                       strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, packurl, sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, "\n", sizeof(sendbuffer));
-
-                       foundone = true;
-               }
+               return true;
        }
 
+       return false;
+}
+void Curl_SendRequirements(void)
+{
+       // for each requirement, find the pack name
+       char sendbuffer[4096] = "";
+       requirement *req;
+       qbool foundone = false;
+       const char *p;
+
+       for(req = requirements; req; req = req->next)
+               foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
+
+       p = sv_curl_serverpackages.string;
+       while(COM_ParseToken_Simple(&p, false, false, true))
+               foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
+
        if(foundone)
                strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
 
        if(strlen(sendbuffer) + 1 < sizeof(sendbuffer))
-               Host_ClientCommands("%s", sendbuffer);
+               SV_ClientCommands("%s", sendbuffer);
        else
                Con_Printf("Could not initiate autodownload due to URL buffer overflow\n");
 }
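
Put together, the command sequence that Curl_SendRequirement appends to sendbuffer and SV_ClientCommands sends to the client would look roughly like this (pack, map and URL invented; the --maxspeed argument only appears when sv_curl_maxspeed is greater than 0):

	curl --clear_autodownload
	curl --pak --forthismap --as mymap.pk3 --maxspeed=150.0 --for maps/mymap.bsp http://example.com/mymaps/mymap.pk3
	curl --finish_autodownload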