X-Git-Url: https://git.xonotic.org/?p=xonotic%2Fdarkplaces.git;a=blobdiff_plain;f=libcurl.c;h=26cb7a4914de0cc915f19fa813d73fe056d80422;hp=de205748dbd01855e5bc1bcc47d9990a78835f00;hb=HEAD;hpb=df61ed83cc1ab2a3e2fcd220cf89dd0b9d4144af

diff --git a/libcurl.c b/libcurl.c
index de205748..d4b2defc 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -2,19 +2,22 @@
 #include "fs.h"
 #include "libcurl.h"
 #include "thread.h"
-
+#include "com_list.h"
 #include "image.h"
 #include "jpeg.h"
 #include "image_png.h"
 
-static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
-static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
-static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
-static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
-static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
-static cvar_t cl_curl_useragent = {0, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
-static cvar_t cl_curl_useragent_append = {0, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
+static cvar_t curl_enabled = {CF_SHARED | CF_ARCHIVE, "curl_enabled","1", "whether libcurl may be used to GET files or POST data"};
+static cvar_t curl_maxdownloads = {CF_SHARED | CF_ARCHIVE, "curl_maxdownloads","3", "maximum number of concurrent HTTP/FTP downloads"};
+static cvar_t curl_maxspeed = {CF_SHARED | CF_ARCHIVE, "curl_maxspeed","0", "maximum download speed (KiB/s)"};
+static cvar_t curl_useragent = {CF_SHARED, "curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
+static cvar_t curl_useragent_append = {CF_SHARED, "curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
+
+static cvar_t sv_curl_defaulturl = {CF_SERVER, "sv_curl_defaulturl","", "default autodownload source URL"};
+static cvar_t sv_curl_serverpackages = {CF_SERVER, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CF_SERVER, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
+
+static cvar_t developer_curl = {CF_SHARED, "developer_curl","0", "whether verbose libcurl output should be printed to stderr"};
 
 /*
 =================================================================
@@ -62,6 +65,7 @@ typedef enum
 	CINIT(LOW_SPEED_TIME, LONG, 20),
 	CINIT(RESUME_FROM, LONG, 21),
 	CINIT(HTTPHEADER, OBJECTPOINT, 23),
+	CINIT(VERBOSE, LONG, 41),
 	CINIT(POST, LONG, 47), /* HTTP POST method */
 	CINIT(FOLLOWLOCATION, LONG, 52), /* use Location: Luke! */
 	CINIT(POSTFIELDSIZE, LONG, 60),
@@ -156,6 +160,7 @@ static const char * (*qcurl_easy_strerror) (CURLcode);
 
 static CURLM * (*qcurl_multi_init) (void);
 static CURLMcode (*qcurl_multi_perform) (CURLM *multi_handle, int *running_handles);
+static CURLMcode (*qcurl_multi_wait) (CURLM *multi_handle, void*, unsigned int extra_nfds, int timeout_ms, int *ret);
 static CURLMcode (*qcurl_multi_add_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
@@ -175,6 +180,7 @@ static dllfunction_t curlfuncs[] =
 	{"curl_easy_getinfo",		(void **) &qcurl_easy_getinfo},
 	{"curl_multi_init",		(void **) &qcurl_multi_init},
 	{"curl_multi_perform",		(void **) &qcurl_multi_perform},
+	{"curl_multi_wait",		(void **) &qcurl_multi_wait},
 	{"curl_multi_add_handle",	(void **) &qcurl_multi_add_handle},
 	{"curl_multi_remove_handle",(void **) &qcurl_multi_remove_handle},
 	{"curl_multi_info_read",	(void **) &qcurl_multi_info_read},
@@ -204,13 +210,13 @@ typedef struct downloadinfo_s
 	qfile_t *stream;
 	fs_offset_t startpos;
 	CURL *curle;
-	qboolean started;
+	qbool started;
 	int loadtype;
 	size_t bytes_received; // for buffer
 	double bytes_received_curl; // for throttling
 	double bytes_sent_curl; // for throttling
-	struct downloadinfo_s *next, *prev;
-	qboolean forthismap;
+	llist_t list;
+	qbool forthismap;
 	double maxspeed;
 	curl_slist *slist; // http headers
 
@@ -225,10 +231,10 @@ typedef struct downloadinfo_s
 	const char *extraheaders;
 }
 downloadinfo;
-static downloadinfo *downloads = NULL;
+LIST_HEAD(downloads);
 static int numdownloads = 0;
 
-static qboolean noclear = FALSE;
+static qbool noclear = false;
 
 static int numdownloads_fail = 0;
 static int numdownloads_success = 0;
@@ -250,7 +256,7 @@ static void Curl_CommandWhenDone(const char *cmd)
 	if(!curl_dll)
 		return;
 	if(cmd)
-		strlcpy(command_when_done, cmd, sizeof(command_when_done));
+		dp_strlcpy(command_when_done, cmd, sizeof(command_when_done));
 	else
 		*command_when_done = 0;
 }
@@ -266,7 +272,7 @@ static void Curl_CommandWhenError(const char *cmd)
 	if(!curl_dll)
 		return;
 	if(cmd)
-		strlcpy(command_when_error, cmd, sizeof(command_when_error));
+		dp_strlcpy(command_when_error, cmd, sizeof(command_when_error));
 	else
 		*command_when_error = 0;
 }
@@ -284,7 +290,7 @@ void Curl_Clear_forthismap(void)
 	if(noclear)
 		return;
 	if (curl_mutex) Thread_LockMutex(curl_mutex);
-	for(di = downloads; di; di = di->next)
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		di->forthismap = false;
 	Curl_CommandWhenError(NULL);
 	Curl_CommandWhenDone(NULL);
@@ -301,7 +307,7 @@ Curl_Have_forthismap
 Returns true if a download needed for the current game is running.
 ====================
 */
-qboolean Curl_Have_forthismap(void)
+qbool Curl_Have_forthismap(void)
 {
 	return numdownloads_added != 0;
 }
@@ -331,16 +337,16 @@ static void Curl_CheckCommandWhenDone(void)
 		if(numdownloads_fail == 0)
 		{
 			Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
-			Cbuf_AddText("\n");
-			Cbuf_AddText(command_when_done);
-			Cbuf_AddText("\n");
+			Cbuf_AddText(cmd_local, "\n");
+			Cbuf_AddText(cmd_local, command_when_done);
+			Cbuf_AddText(cmd_local, "\n");
 		}
 		else
 		{
 			Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
-			Cbuf_AddText("\n");
-			Cbuf_AddText(command_when_error);
-			Cbuf_AddText("\n");
+			Cbuf_AddText(cmd_local, "\n");
+			Cbuf_AddText(cmd_local, command_when_error);
+			Cbuf_AddText(cmd_local, "\n");
 		}
 		Curl_Clear_forthismap();
 	}
@@ -353,7 +359,7 @@ CURL_CloseLibrary
 Load the cURL DLL
 ====================
 */
-static qboolean CURL_OpenLibrary (void)
+static qbool CURL_OpenLibrary (void)
 {
 	const char* dllnames [] =
 	{
@@ -377,7 +383,7 @@ static qboolean CURL_OpenLibrary (void)
 		return true;
 
 	// Load the DLL
-	return Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs);
+	return Sys_LoadDependency (dllnames, &curl_dll, curlfuncs);
 }
 
 
@@ -390,7 +396,7 @@ Unload the cURL DLL
 */
 static void CURL_CloseLibrary (void)
 {
-	Sys_UnloadLibrary (&curl_dll);
+	Sys_FreeLibrary (&curl_dll);
 }
 
 
@@ -430,6 +436,8 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
 
 	di->bytes_received += bytes;
 
+	//Con_Printf("CURL_fwrite callback timestamp: %f bytes: %ld\n", host.realtime, ret);
+
 	return ret;
 	// Why not ret / nmemb?
 	// Because CURLOPT_WRITEFUNCTION docs say to return the number of bytes.
@@ -509,11 +517,11 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static qbool Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qbool forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error, const char *content_type_)
 {
 	char content_type[64];
-	qboolean ok = false;
+	qbool ok = false;
 	if(!curl_dll)
 		return;
 	switch(status)
@@ -545,7 +553,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 			break;
 	}
 	if(content_type_)
-		strlcpy(content_type, content_type_, sizeof(content_type));
+		dp_strlcpy(content_type, content_type_, sizeof(content_type));
 	else
 		*content_type = 0;
 
@@ -581,7 +589,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 
 	if(ok && di->loadtype == LOADTYPE_PAK)
 	{
-		ok = FS_AddPack(di->filename, NULL, true);
+		ok = FS_AddPack(di->filename, NULL, true, true);
 		if(!ok)
 			CLEAR_AND_RETRY();
 	}
@@ -620,12 +628,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 			CLEAR_AND_RETRY();
 	}
 
-	if(di->prev)
-		di->prev->next = di->next;
-	else
-		downloads = di->next;
-	if(di->next)
-		di->next->prev = di->prev;
+	List_Delete(&di->list);
 
 	--numdownloads;
 	if(di->forthismap)
@@ -684,10 +687,10 @@ static void CheckPendingDownloads(void)
 	char vabuf[1024];
 	if(!curl_dll)
 		return;
-	if(numdownloads < cl_curl_maxdownloads.integer)
+	if(numdownloads < curl_maxdownloads.integer)
 	{
 		downloadinfo *di;
-		for(di = downloads; di; di = di->next)
+		List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		{
 			if(!di->started)
 			{
@@ -718,7 +721,7 @@ static void CheckPendingDownloads(void)
 				di->curle = qcurl_easy_init();
 				di->slist = NULL;
 				qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
-				if(cl_curl_useragent.integer)
+				if(curl_useragent.integer)
 				{
 					const char *ua
 #ifdef HTTP_USER_AGENT
@@ -728,17 +731,19 @@ static void CheckPendingDownloads(void)
 #endif
 					if(!ua)
 						ua = "";
-					if(*cl_curl_useragent_append.string)
+					if(*curl_useragent_append.string)
 						ua = va(vabuf, sizeof(vabuf), "%s%s%s",
 							ua,
 							(ua[0] && ua[strlen(ua)-1] != ' ') ?
 							" " : "",
-							cl_curl_useragent_append.string);
+							curl_useragent_append.string);
 					qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ua);
 				}
 				else
 					qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, "");
+				if(developer_curl.integer)
+					qcurl_easy_setopt(di->curle, CURLOPT_VERBOSE, (long) 1);
 				qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
 				qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos);
 				qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1);
@@ -787,7 +792,7 @@ static void CheckPendingDownloads(void)
 				qcurl_multi_add_handle(curlm, di->curle);
 				di->started = true;
 				++numdownloads;
-				if(numdownloads >= cl_curl_maxdownloads.integer)
+				if(numdownloads >= curl_maxdownloads.integer)
 					break;
 			}
 		}
@@ -808,7 +813,7 @@ void Curl_Init(void)
 	if(!curl_dll)
 		return;
 	if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex();
-	qcurl_global_init(CURL_GLOBAL_NOTHING);
+	qcurl_global_init(CURL_GLOBAL_SSL);
 	curlm = qcurl_multi_init();
 }
 
@@ -831,6 +836,12 @@ void Curl_Shutdown(void)
 	curl_dll = NULL;
 }
 
+// for VM_checkextension()
+qbool Curl_Available(void)
+{
+	return curl_dll ? true : false;
+}
+
 /*
 ====================
 Curl_Find
@@ -843,7 +854,7 @@ static downloadinfo *Curl_Find(const char *filename)
 	downloadinfo *di;
 	if(!curl_dll)
 		return NULL;
-	for(di = downloads; di; di = di->next)
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		if(!strcasecmp(di->filename, filename))
 			return di;
 	return NULL;
 }
@@ -851,19 +862,16 @@ static downloadinfo *Curl_Find(const char *filename)
 
 void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata)
 {
-	downloadinfo *di;
+	downloadinfo *di, *ndi;
 	if(!curl_dll)
 		return;
-	for(di = downloads; di; )
+	List_For_Each_Entry_Safe(di, ndi, &downloads, downloadinfo, list)
 	{
 		if(di->callback == callback && di->callback_data == cbdata)
 		{
 			di->callback = curl_quiet_callback; // do NOT call the callback
 			Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
-			di = downloads;
 		}
-		else
-			di = di->next;
 	}
 }
 
@@ -875,13 +883,13 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qbool Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qbool forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
 	if(buf)
 		if(loadtype != LOADTYPE_NONE)
 			Host_Error("Curl_Begin: loadtype and buffer are both set");
 
-	if(!curl_dll || !cl_curl_enabled.integer)
+	if(!curl_dll || !curl_enabled.integer)
 	{
 		return false;
 	}
@@ -989,8 +997,8 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 	{
 		if(loadtype == LOADTYPE_PAK)
 		{
-			qboolean already_loaded;
-			if(FS_AddPack(fn, &already_loaded, true))
+			qbool already_loaded;
+			if(FS_AddPack(fn, &already_loaded, true, true))
 			{
 				Con_DPrintf("%s already exists, not downloading!\n", fn);
 				if(already_loaded)
@@ -1053,8 +1061,8 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 		if(forthismap)
 			++numdownloads_added;
 		di = (downloadinfo *) Z_Malloc(sizeof(*di));
-		strlcpy(di->filename, name, sizeof(di->filename));
-		strlcpy(di->url, URL, sizeof(di->url));
+		dp_strlcpy(di->filename, name, sizeof(di->filename));
+		dp_strlcpy(di->url, URL, sizeof(di->url));
 		dpsnprintf(di->referer, sizeof(di->referer), "dp://%s/", cls.netcon ? cls.netcon->address : "notconnected.invalid");
 		di->forthismap = forthismap;
 		di->stream = NULL;
@@ -1067,11 +1075,6 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 		di->bytes_received_curl = 0;
 		di->bytes_sent_curl = 0;
 		di->extraheaders = extraheaders;
-		di->next = downloads;
-		di->prev = NULL;
-		if(di->next)
-			di->next->prev = di;
-
 		di->buffer = buf;
 		di->buffersize = bufsize;
 		if(callback == NULL)
@@ -1098,41 +1101,44 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 			di->postbufsize = 0;
 		}
 
-		downloads = di;
-		if (curl_mutex) Thread_UnlockMutex(curl_mutex);
+		List_Add(&di->list, &downloads);
+
+		if (curl_mutex)
+			Thread_UnlockMutex(curl_mutex);
+
 		return true;
 	}
 }
 
-qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, int loadtype, qboolean forthismap)
+qbool Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, int loadtype, qbool forthismap)
 {
 	return Curl_Begin(URL, NULL, maxspeed, name, loadtype, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
 }
-qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qbool Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
 	return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
 }
-qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qbool Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
 	return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
 }
 
 /*
 ====================
-Curl_Run
+Curl_Frame
 
 call this regularily as this will always download as much as possible without
 blocking.
 ====================
 */
-void Curl_Run(void)
+void Curl_Frame(void)
 {
 	double maxspeed;
 	downloadinfo *di;
 
-	noclear = FALSE;
+	noclear = false;
 
-	if(!cl_curl_enabled.integer)
+	if(!curl_enabled.integer && cls.state != ca_dedicated)
 		return;
 
 	if(!curl_dll)
@@ -1142,13 +1148,13 @@ void Curl_Run(void)
 	Curl_CheckCommandWhenDone();
 
-	if(!downloads)
+	if(List_Is_Empty(&downloads))
 	{
 		if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 		return;
 	}
 
-	if(realtime < curltime) // throttle
+	if(host.realtime < curltime) // throttle
 	{
 		if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 		return;
 	}
@@ -1164,7 +1170,7 @@ void Curl_Run(void)
 	}
 	while(mc == CURLM_CALL_MULTI_PERFORM);
 
-	for(di = downloads; di; di = di->next)
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 	{
 		double b = 0;
 		if(di->curle)
@@ -1222,8 +1228,8 @@ void Curl_Run(void)
 	// use the slowest allowing download to derive the maxspeed... this CAN
 	// be done better, but maybe later
-	maxspeed = cl_curl_maxspeed.value;
-	for(di = downloads; di; di = di->next)
+	maxspeed = curl_maxspeed.value;
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		if(di->maxspeed > 0)
 			if(di->maxspeed < maxspeed || maxspeed <= 0)
 				maxspeed = di->maxspeed;
 
@@ -1231,16 +1237,44 @@
 	if(maxspeed > 0)
 	{
 		double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
-		curltime = realtime + bytes / (maxspeed * 1024.0);
+		curltime = host.realtime + bytes / (maxspeed * 1024.0);
 		bytes_sent = 0;
 		bytes_received = 0;
 	}
 	else
-		curltime = realtime;
+		curltime = host.realtime;
 
 	if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
+/*
+====================
+Curl_Select
+
+Sleeps until there's some transfer progress or a timeout is reached,
+unfortunately the timeout is only in milliseconds.
+This allows good throughput even at very low FPS.
+Less important on newer libcurl versions but still helps.
+
+Returns 0 immediately if there's no transfers to wait for,
+or > 0 if a transfer is ready or the timeout was reached.
+====================
+*/
+int Curl_Select(int timeout_ms)
+{
+	CURLMcode err;
+	int numfds;
+
+	if (List_Is_Empty(&downloads))
+		return 0;
+
+	err = qcurl_multi_wait(curlm, NULL, 0, timeout_ms, &numfds);
+	if (err == CURLM_OK)
+		return numfds;
+	Con_Printf("curl_multi_wait() failed, code %d\n", err);
+	return 0;
+}
+
 /*
 ====================
 Curl_CancelAll
@@ -1255,9 +1289,9 @@ void Curl_CancelAll(void)
 
 	if (curl_mutex) Thread_LockMutex(curl_mutex);
 
-	while(downloads)
+	while(!List_Is_Empty(&downloads))
 	{
-		Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
+		Curl_EndDownload(List_First_Entry(&downloads, downloadinfo, list), CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL);
 		// INVARIANT: downloads will point to the next download after that!
 	}
 
@@ -1268,15 +1302,15 @@
 ====================
 Curl_Running
 
-returns true iff there is a download running.
+returns true if there is a download running.
 ====================
 */
-qboolean Curl_Running(void)
+qbool Curl_Running(void)
 {
 	if(!curl_dll)
 		return false;
 
-	return downloads != NULL;
+	return !List_Is_Empty(&downloads);
 }
 
 /*
@@ -1333,7 +1367,7 @@ prints the download list
 ====================
 */
 // TODO rewrite using Curl_GetDownloadInfo?
-static void Curl_Info_f(void)
+static void Curl_Info_f(cmd_state_t *cmd)
 {
 	downloadinfo *di;
 	char urlbuf[1024];
@@ -1343,7 +1377,7 @@ static void Curl_Info_f(void)
 	{
 		if (curl_mutex) Thread_LockMutex(curl_mutex);
 		Con_Print("Currently running downloads:\n");
-		for(di = downloads; di; di = di->next)
+		List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		{
 			double speed, percent;
 			Con_Printf(" %s -> %s ", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
@@ -1388,13 +1422,13 @@ curl --finish_autodownload
 once the last download completes successfully, reconnect to the current server
 ====================
 */
-static void Curl_Curl_f(void)
+static void Curl_Curl_f(cmd_state_t *cmd)
 {
 	double maxspeed = 0;
 	int i;
 	int end;
 	int loadtype = LOADTYPE_NONE;
-	qboolean forthismap = false;
+	qbool forthismap = false;
 	const char *url;
 	const char *name = 0;
 
@@ -1404,27 +1438,27 @@ static void Curl_Curl_f(void)
 		return;
 	}
 
-	if(!cl_curl_enabled.integer)
+	if(!curl_enabled.integer)
 	{
 		Con_Print("curl support not enabled. Set cl_curl_enabled to 1 to enable.\n");
 		return;
 	}
 
-	if(Cmd_Argc() < 2)
+	if(Cmd_Argc(cmd) < 2)
 	{
 		Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
 		return;
 	}
 
-	url = Cmd_Argv(Cmd_Argc() - 1);
-	end = Cmd_Argc();
+	url = Cmd_Argv(cmd, Cmd_Argc(cmd) - 1);
+	end = Cmd_Argc(cmd);
 	for(i = 1; i != end; ++i)
 	{
-		const char *a = Cmd_Argv(i);
+		const char *a = Cmd_Argv(cmd, i);
 		if(!strcmp(a, "--info"))
 		{
-			Curl_Info_f();
+			Curl_Info_f(cmd);
 			return;
 		}
 		else if(!strcmp(a, "--cancel"))
@@ -1457,7 +1491,7 @@ static void Curl_Curl_f(void)
 			{
 				for(i = i + 1; i != end - 1; ++i)
 				{
-					if(!FS_FileExists(Cmd_Argv(i)))
+					if(!FS_FileExists(Cmd_Argv(cmd, i)))
 						goto needthefile; // why can't I have a "double break"?
 				}
 				// if we get here, we have all the files...
@@ -1472,7 +1506,7 @@
 			if(i < end - 1)
 			{
 				++i;
-				name = Cmd_Argv(i);
+				name = Cmd_Argv(cmd, i);
 			}
 		}
 		else if(!strcmp(a, "--clear_autodownload"))
@@ -1493,9 +1527,9 @@ static void Curl_Curl_f(void)
 				{
 					dpsnprintf(donecommand, sizeof(donecommand), "connect %s", cls.netcon->address);
 					Curl_CommandWhenDone(donecommand);
-					noclear = TRUE;
+					noclear = true;
 					CL_Disconnect();
-					noclear = FALSE;
+					noclear = false;
 					Curl_CheckCommandWhenDone();
 				}
 				else
@@ -1526,10 +1560,10 @@ static void curl_curlcat_callback(int code, size_t length_received, unsigned cha
 	Z_Free(buffer);
 }
 
-void Curl_CurlCat_f(void)
+void Curl_CurlCat_f(cmd_state_t *cmd)
 {
 	unsigned char *buf;
-	const char *url = Cmd_Argv(1);
+	const char *url = Cmd_Argv(cmd, 1);
 	buf = Z_Malloc(16384);
 	Curl_Begin_ToMemory(url, buf, 16384, curl_curlcat_callback, NULL);
 }
@@ -1544,16 +1578,25 @@ loads the commands and cvars this library uses
 */
 void Curl_Init_Commands(void)
 {
-	Cvar_RegisterVariable (&cl_curl_enabled);
-	Cvar_RegisterVariable (&cl_curl_maxdownloads);
-	Cvar_RegisterVariable (&cl_curl_maxspeed);
+	Cvar_RegisterVariable (&curl_enabled);
+	Cvar_RegisterVariable (&curl_maxdownloads);
+	Cvar_RegisterVariable (&curl_maxspeed);
+	Cvar_RegisterVariable (&curl_useragent);
+	Cvar_RegisterVariable (&curl_useragent_append);
+	Cvar_RegisterVirtual (&curl_enabled, "cl_curl_enabled");
+	Cvar_RegisterVirtual (&curl_maxdownloads, "cl_curl_maxdownloads");
+	Cvar_RegisterVirtual (&curl_maxspeed, "cl_curl_maxspeed");
+	Cvar_RegisterVirtual (&curl_useragent, "cl_curl_useragent");
+	Cvar_RegisterVirtual (&curl_useragent_append, "cl_curl_useragent_append");
+
 	Cvar_RegisterVariable (&sv_curl_defaulturl);
 	Cvar_RegisterVariable (&sv_curl_serverpackages);
 	Cvar_RegisterVariable (&sv_curl_maxspeed);
-	Cvar_RegisterVariable (&cl_curl_useragent);
-	Cvar_RegisterVariable (&cl_curl_useragent_append);
-	Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
-	//Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
+
+	Cvar_RegisterVariable (&developer_curl);
+
+	Cmd_AddCommand(CF_CLIENT | CF_CLIENT_FROM_SERVER, "curl", Curl_Curl_f, "download data from an URL and add to search path");
+	//Cmd_AddCommand(cmd_local, "curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
 
/*
@@ -1584,18 +1627,18 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
 	if (curl_mutex) Thread_LockMutex(curl_mutex);
 
 	i = 0;
-	for(di = downloads; di; di = di->next)
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 		++i;
 	downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * i);
 	i = 0;
-	for(di = downloads; di; di = di->next)
+	List_For_Each_Entry(di, &downloads, downloadinfo, list)
 	{
 		// do not show infobars for background downloads
 		if(developer.integer <= 0)
 			if(di->buffer)
 				continue;
-		strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
+		dp_strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
 		if(di->curle)
 		{
 			downinfo[i].progress = Curl_GetDownloadAmount(di);
@@ -1666,7 +1709,7 @@ static const char *Curl_FindPackURL(const char *filename)
 		// read lines of format "pattern url"
 		char *p = buf;
 		char *pattern = NULL, *patternend = NULL, *url = NULL, *urlend = NULL;
-		qboolean eof = false;
+		qbool eof = false;
 
 		pattern = p;
 		while(!eof)
@@ -1686,7 +1729,7 @@ static const char *Curl_FindPackURL(const char *filename)
 					*urlend = 0;
 					if(matchpattern(filename, pattern, true))
 					{
-						strlcpy(foundurl, url, sizeof(foundurl));
+						dp_strlcpy(foundurl, url, sizeof(foundurl));
 						Z_Free(buf);
 						return foundurl;
 					}
@@ -1738,7 +1781,7 @@ void Curl_RequireFile(const char *filename)
 {
 	requirement *req = (requirement *) Z_Malloc(sizeof(*requirements));
 	req->next = requirements;
-	strlcpy(req->filename, filename, sizeof(req->filename));
+	dp_strlcpy(req->filename, filename, sizeof(req->filename));
 	requirements = req;
 }
 
@@ -1772,7 +1815,7 @@ This is done by sending him the following console commands:
 curl --finish_autodownload
 ====================
 */
-static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
+static qbool Curl_SendRequirement(const char *filename, qbool foundone, char *sendbuffer, size_t sendbuffer_len)
 {
 	const char *p;
 	const char *thispack = FS_WhichPack(filename);
@@ -1790,18 +1833,18 @@ static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, ch
 	if(packurl && *packurl && strcmp(packurl, "-"))
 	{
 		if(!foundone)
-			strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+			dp_strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
 
-		strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
-		strlcat(sendbuffer, thispack, sendbuffer_len);
+		dp_strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+		dp_strlcat(sendbuffer, thispack, sendbuffer_len);
 		if(sv_curl_maxspeed.value > 0)
 			dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
-		strlcat(sendbuffer, " --for ", sendbuffer_len);
-		strlcat(sendbuffer, filename, sendbuffer_len);
-		strlcat(sendbuffer, " ", sendbuffer_len);
-		strlcat(sendbuffer, packurl, sendbuffer_len);
-		strlcat(sendbuffer, thispack, sendbuffer_len);
-		strlcat(sendbuffer, "\n", sendbuffer_len);
+		dp_strlcat(sendbuffer, " --for ", sendbuffer_len);
+		dp_strlcat(sendbuffer, filename, sendbuffer_len);
+		dp_strlcat(sendbuffer, " ", sendbuffer_len);
+		dp_strlcat(sendbuffer, packurl, sendbuffer_len);
+		dp_strlcat(sendbuffer, thispack, sendbuffer_len);
+		dp_strlcat(sendbuffer, "\n", sendbuffer_len);
 
 		return true;
 	}
@@ -1813,7 +1856,7 @@ void Curl_SendRequirements(void)
 	// for each requirement, find the pack name
 	char sendbuffer[4096] = "";
 	requirement *req;
-	qboolean foundone = false;
+	qbool foundone = false;
 	const char *p;
 
 	for(req = requirements; req; req = req->next)
@@ -1824,10 +1867,10 @@ void Curl_SendRequirements(void)
 			foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
 	if(foundone)
-		strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
+		dp_strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
 
 	if(strlen(sendbuffer) + 1 < sizeof(sendbuffer))
-		Host_ClientCommands("%s", sendbuffer);
+		SV_ClientCommands("%s", sendbuffer);
 	else
 		Con_Printf("Could not initiate autodownload due to URL buffer overflow\n");
 }
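
Not part of the patch above: a minimal usage sketch of the new polling/wait pair. The hunks add a qcurl_multi_wait() wrapper (Curl_Select) and rename Curl_Run to Curl_Frame; per the Curl_Select comment, a caller can now sleep on libcurl's sockets for up to a millisecond budget instead of busy-polling once per frame. The caller below is hypothetical — ExampleHost_WaitAndService and budget_ms are illustrative names only, and it assumes Curl_Running, Curl_Select and Curl_Frame are exported through libcurl.h as this commit intends.

/* Illustrative sketch only, not from this commit. */
#include "quakedef.h"
#include "libcurl.h"

static void ExampleHost_WaitAndService(int budget_ms)
{
	// Curl_Select() returns 0 immediately when no downloads are queued;
	// otherwise it blocks in qcurl_multi_wait() until a transfer is ready
	// or budget_ms elapses (the timeout is in milliseconds).
	if (Curl_Running())
		Curl_Select(budget_ms);

	// Curl_Frame() (formerly Curl_Run) then moves as much data as it can
	// without blocking, applying the curl_maxspeed throttle and running
	// any queued completion commands.
	Curl_Frame();
}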