// Console variables controlling the cURL download subsystem.
// NOTE(review): the leading "1" in each initializer is presumably a cvar
// flag bitmask (e.g. CVAR_SAVE) -- confirm against cvar_t's definition,
// which is not visible in this chunk.
5 static cvar_t cl_curl_maxdownloads = {1, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
6 static cvar_t cl_curl_maxspeed = {1, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
7 static cvar_t sv_curl_defaulturl = {1, "sv_curl_defaulturl","", "default autodownload source URL"};
8 static cvar_t cl_curl_enabled = {1, "cl_curl_enabled","0", "whether client's download support is enabled"};
/*
11 =================================================================
13 Minimal set of definitions from libcurl
15 WARNING: for a matter of simplicity, several pointer types are
16 cast to "void*", and most enumerated values are not included
18 =================================================================
*/
// Opaque handle types; the real struct layouts live inside libcurl and
// are never dereferenced here.
21 typedef struct CURL_s CURL;
22 typedef struct CURLM_s CURLM;
// Fragment of the CURLMcode enum (other enumerators elided in this listing).
30 CURLM_CALL_MULTI_PERFORM=-1, /* please call curl_multi_perform() soon */
// curl_global_init() initialization flags.
34 #define CURL_GLOBAL_NOTHING 0
35 #define CURL_GLOBAL_SSL 1
36 #define CURL_GLOBAL_WIN32 2
// CURLOPT numbering bases: libcurl encodes each option's argument type in
// the ten-thousands digit of its value, mirroring the encoding in the
// official <curl/curl.h>.
37 #define CURLOPTTYPE_LONG 0
38 #define CURLOPTTYPE_OBJECTPOINT 10000
39 #define CURLOPTTYPE_FUNCTIONPOINT 20000
40 #define CURLOPTTYPE_OFF_T 30000
// Expands to "CURLOPT_<name> = <typebase> + <number>", same as libcurl's
// own CINIT macro.
41 #define CINIT(name,type,number) CURLOPT_ ## name = CURLOPTTYPE_ ## type + number
// Subset of CURLoption values actually used by this file (enum wrapper
// lines are elided in this listing).
44 CINIT(WRITEDATA, OBJECTPOINT, 1),
45 CINIT(URL, OBJECTPOINT, 2),
46 CINIT(ERRORBUFFER, OBJECTPOINT, 10),
47 CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
48 CINIT(REFERER, OBJECTPOINT, 16),
49 CINIT(USERAGENT, OBJECTPOINT, 18),
50 CINIT(RESUME_FROM, LONG, 21),
51 CINIT(FOLLOWLOCATION, LONG, 52), /* use Location: Luke! */
52 CINIT(PRIVATE, OBJECTPOINT, 103),
// curl_infotype debug-callback values; kept in order so the implicit
// enumerator numbering (see trailing comments) matches libcurl's.
58 CURLINFO_HEADER_IN, /* 1 */
59 CURLINFO_HEADER_OUT, /* 2 */
60 CURLINFO_DATA_IN, /* 3 */
61 CURLINFO_DATA_OUT, /* 4 */
62 CURLINFO_SSL_DATA_IN, /* 5 */
63 CURLINFO_SSL_DATA_OUT, /* 6 */
// CURLINFO value encoding: the top nibble of the 24-bit constant tags the
// result type (string/long/double/slist), the low 20 bits are the counter.
67 #define CURLINFO_STRING 0x100000
68 #define CURLINFO_LONG 0x200000
69 #define CURLINFO_DOUBLE 0x300000
70 #define CURLINFO_SLIST 0x400000
71 #define CURLINFO_MASK 0x0fffff
72 #define CURLINFO_TYPEMASK 0xf00000
// CURLINFO enum: each entry combines a type tag (see CURLINFO_* masks
// above) with a sequence number, matching libcurl's curl_easy_getinfo()
// keys. Only CURLINFO_RESPONSE_CODE, CURLINFO_PRIVATE,
// CURLINFO_CONTENT_LENGTH_DOWNLOAD and CURLINFO_SPEED_DOWNLOAD are used
// later in this file; the rest preserve the numbering.
75 CURLINFO_NONE, /* first, never use this */
76 CURLINFO_EFFECTIVE_URL = CURLINFO_STRING + 1,
77 CURLINFO_RESPONSE_CODE = CURLINFO_LONG + 2,
78 CURLINFO_TOTAL_TIME = CURLINFO_DOUBLE + 3,
79 CURLINFO_NAMELOOKUP_TIME = CURLINFO_DOUBLE + 4,
80 CURLINFO_CONNECT_TIME = CURLINFO_DOUBLE + 5,
81 CURLINFO_PRETRANSFER_TIME = CURLINFO_DOUBLE + 6,
82 CURLINFO_SIZE_UPLOAD = CURLINFO_DOUBLE + 7,
83 CURLINFO_SIZE_DOWNLOAD = CURLINFO_DOUBLE + 8,
84 CURLINFO_SPEED_DOWNLOAD = CURLINFO_DOUBLE + 9,
85 CURLINFO_SPEED_UPLOAD = CURLINFO_DOUBLE + 10,
86 CURLINFO_HEADER_SIZE = CURLINFO_LONG + 11,
87 CURLINFO_REQUEST_SIZE = CURLINFO_LONG + 12,
88 CURLINFO_SSL_VERIFYRESULT = CURLINFO_LONG + 13,
89 CURLINFO_FILETIME = CURLINFO_LONG + 14,
90 CURLINFO_CONTENT_LENGTH_DOWNLOAD = CURLINFO_DOUBLE + 15,
91 CURLINFO_CONTENT_LENGTH_UPLOAD = CURLINFO_DOUBLE + 16,
92 CURLINFO_STARTTRANSFER_TIME = CURLINFO_DOUBLE + 17,
93 CURLINFO_CONTENT_TYPE = CURLINFO_STRING + 18,
94 CURLINFO_REDIRECT_TIME = CURLINFO_DOUBLE + 19,
95 CURLINFO_REDIRECT_COUNT = CURLINFO_LONG + 20,
96 CURLINFO_PRIVATE = CURLINFO_STRING + 21,
97 CURLINFO_HTTP_CONNECTCODE = CURLINFO_LONG + 22,
98 CURLINFO_HTTPAUTH_AVAIL = CURLINFO_LONG + 23,
99 CURLINFO_PROXYAUTH_AVAIL = CURLINFO_LONG + 24,
100 CURLINFO_OS_ERRNO = CURLINFO_LONG + 25,
101 CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26,
102 CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27,
// CURLMSG enum fragment: message kinds returned by
// curl_multi_info_read(); only CURLMSG_DONE is checked in Curl_Run.
108 CURLMSG_NONE, /* first, not used */
109 CURLMSG_DONE, /* This easy handle has completed. 'result' contains
110 the CURLcode of the transfer */
// Fields of the CURLMsg struct (struct wrapper lines elided in this
// listing); 'whatever'/'result' form the union named 'data' in libcurl.
116 CURLMSG msg; /* what this message means */
117 CURL *easy_handle; /* the handle it concerns */
120 void *whatever; /* message-specific data */
121 CURLcode result; /* return code for transfer */
// Function pointers into the dynamically loaded libcurl; filled in by
// Sys_LoadLibrary via the curlfuncs table below. The "q" prefix keeps them
// distinct from the real libcurl symbols.
127 static void (*qcurl_global_init) (long flags);
128 static void (*qcurl_global_cleanup) ();
130 static CURL * (*qcurl_easy_init) ();
131 static void (*qcurl_easy_cleanup) (CURL *handle);
132 static CURLcode (*qcurl_easy_setopt) (CURL *handle, CURLoption option, ...);
133 static CURLcode (*qcurl_easy_getinfo) (CURL *handle, CURLINFO info, ...);
134 static const char * (*qcurl_easy_strerror) (CURLcode);
136 static CURLM * (*qcurl_multi_init) ();
137 static CURLMcode (*qcurl_multi_perform) (CURLM *multi_handle, int *running_handles);
138 static CURLMcode (*qcurl_multi_add_handle) (CURLM *multi_handle, CURL *easy_handle);
139 static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_handle);
140 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
141 static void (*qcurl_multi_cleanup) (CURLM *);
// NOTE(review): the real curl_multi_strerror takes a CURLMcode, not a
// CURLcode -- harmless here since both are plain enums/ints, but worth
// confirming against the libcurl headers this targets.
142 static const char * (*qcurl_multi_strerror) (CURLcode);
// Symbol-name -> pointer table consumed by Sys_LoadLibrary in
// CURL_OpenLibrary; each entry binds one libcurl export to the matching
// qcurl_* pointer above.
144 static dllfunction_t curlfuncs[] =
146 {"curl_global_init", (void **) &qcurl_global_init},
147 {"curl_global_cleanup", (void **) &qcurl_global_cleanup},
148 {"curl_easy_init", (void **) &qcurl_easy_init},
149 {"curl_easy_cleanup", (void **) &qcurl_easy_cleanup},
150 {"curl_easy_setopt", (void **) &qcurl_easy_setopt},
151 {"curl_easy_strerror", (void **) &qcurl_easy_strerror},
152 {"curl_easy_getinfo", (void **) &qcurl_easy_getinfo},
153 {"curl_multi_init", (void **) &qcurl_multi_init},
154 {"curl_multi_perform", (void **) &qcurl_multi_perform},
155 {"curl_multi_add_handle", (void **) &qcurl_multi_add_handle},
156 {"curl_multi_remove_handle",(void **) &qcurl_multi_remove_handle},
157 {"curl_multi_info_read", (void **) &qcurl_multi_info_read},
158 {"curl_multi_cleanup", (void **) &qcurl_multi_cleanup},
159 {"curl_multi_strerror", (void **) &qcurl_multi_strerror},
163 // Handle for CURL DLL
164 static dllhandle_t curl_dll = NULL;
165 // will be checked at many places to find out if qcurl calls are allowed
// Per-download bookkeeping record; forms a doubly linked list rooted at
// 'downloads'. Several fields (url, referer, curle, stream, forthismap,
// active flag) are referenced later but their declarations are elided in
// this listing.
167 typedef struct downloadinfo_s
169 char filename[MAX_QPATH];
173 fs_offset_t startpos;
177 unsigned long bytes_received;
178 struct downloadinfo_s *next, *prev;
// Head of the download list and count of downloads currently handed to
// libcurl (as opposed to merely queued).
182 static downloadinfo *downloads = NULL;
183 static int numdownloads = 0;
// Loads the cURL shared library (platform-specific name list, partially
// elided) and resolves all symbols from curlfuncs; prints whether cURL
// support ends up enabled. Returns false when loading fails.
192 static qboolean CURL_OpenLibrary (void)
194 const char* dllnames [] =
200 #elif defined(MACOSX)
213 if (! Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs))
215 Con_Printf ("cURL support disabled\n");
219 Con_Printf ("cURL support enabled\n");
// Unloads the cURL shared library again (nulls curl_dll via its address).
231 static void CURL_CloseLibrary (void)
233 Sys_UnloadLibrary (&curl_dll);
// Shared multi-handle for all concurrent transfers, plus global byte
// counter and next-allowed-run timestamp used for bandwidth throttling
// in Curl_Run.
237 static CURLM *curlm = NULL;
238 static unsigned long bytes_received = 0; // used for bandwidth throttling
239 static double curltime = 0;
/*
245 fwrite-compatible function that writes the data to a file. libcurl can call
*/
// Registered as CURLOPT_WRITEFUNCTION; 'vdi' is the downloadinfo passed
// via CURLOPT_WRITEDATA. Updates both the global throttling counter and
// the per-download byte count, then appends to the download's stream.
249 static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
252 size_t bytes = size * nmemb;
253 downloadinfo *di = (downloadinfo *) vdi;
255 bytes_received += bytes;
256 di->bytes_received += bytes;
258 ret = FS_Write(di->stream, data, bytes);
260 return ret; // why not ret / nmemb?
// Outcome codes handed to Curl_EndDownload.
265 CURL_DOWNLOAD_SUCCESS = 0,
266 CURL_DOWNLOAD_FAILED,
267 CURL_DOWNLOAD_ABORTED,
268 CURL_DOWNLOAD_SERVERERROR
/*
274 Curl_Clear_forthismap
276 Clears the "will disconnect on failure" flags.
*/
279 void Curl_Clear_forthismap()
282 for(di = downloads; di; di = di->next)
283 di->forthismap = false;
// Returns whether any queued/running download still carries the
// forthismap ("required for the current map") flag; body partially
// elided in this listing.
286 static qboolean Curl_Have_forthismap()
289 for(di = downloads; di; di = di->next)
/*
299 stops a download. It receives a status (CURL_DOWNLOAD_SUCCESS,
300 CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
301 code from libcurl, or 0, if another error has occurred.
*/
// Prints an outcome message per status, detaches the easy handle from the
// multi handle, closes the output file, optionally loads the finished
// package, and unlinks 'di' from the doubly linked download list.
// (Several lines, including the switch header and frees, are elided in
// this listing.)
304 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
311 case CURL_DOWNLOAD_SUCCESS:
312 Con_Printf("Download of %s: OK\n", di->filename);
315 case CURL_DOWNLOAD_FAILED:
316 Con_Printf("Download of %s: FAILED\n", di->filename);
// 'error' here is the CURLcode reported by libcurl for the transfer.
318 Con_Printf("Reason given by libcurl: %s\n", qcurl_easy_strerror(error));
320 case CURL_DOWNLOAD_ABORTED:
321 Con_Printf("Download of %s: ABORTED\n", di->filename);
// For SERVERERROR, 'error' actually carries the HTTP response code.
323 case CURL_DOWNLOAD_SERVERERROR:
324 Con_Printf("Download of %s: %d\n", di->filename, (int) error);
326 // reopen to enforce it to have zero bytes again
327 FS_Close(di->stream);
328 di->stream = FS_Open(di->filename, "w", false, false);
// Release the libcurl handles for this transfer.
335 qcurl_multi_remove_handle(curlm, di->curle);
336 qcurl_easy_cleanup(di->curle);
// A zero-byte "successful" download is treated as an error.
339 if(ok && !di->bytes_received)
341 Con_Printf("ERROR: empty file\n");
346 FS_Close(di->stream);
// For pak downloads, mount the finished archive into the VFS.
350 ok = FS_AddPack(di->filename, NULL, true);
351 if(ok && di->forthismap)
355 if(!ok && di->forthismap)
357 // BAD. Something went totally wrong.
358 // The best we can do is clean up the forthismap flags...
359 Curl_Clear_forthismap();
// Unlink from the doubly linked list (head update when di->prev is NULL).
365 di->prev->next = di->next;
367 downloads = di->next;
369 di->next->prev = di->prev;
/*
377 CheckPendingDownloads
379 checks if there are free download slots to start new downloads in.
380 To not start too many downloads at once, only one download is added at a time,
381 up to a maximum number of cl_curl_maxdownloads are running.
*/
384 static void CheckPendingDownloads()
388 if(numdownloads < cl_curl_maxdownloads.integer)
// Scan the list for a queued (not yet active) entry; the filter
// condition itself is elided in this listing.
391 for(di = downloads; di; di = di->next)
395 Con_Printf("Downloading %s -> %s", di->url, di->filename);
// Open in append mode so a partial previous download can be resumed.
397 di->stream = FS_Open(di->filename, "ab", false, false);
400 Con_Printf("\nFAILED: Could not open output file %s\n", di->filename);
401 Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK);
// Record the existing file size as the HTTP resume offset.
405 FS_Seek(di->stream, 0, SEEK_END);
406 di->startpos = FS_Tell(di->stream);
408 Con_Printf(", resuming from position %ld", (long) di->startpos);
// Configure one easy handle per download and hand it to the shared
// multi handle; CURLOPT_PRIVATE lets Curl_Run map a finished handle
// back to its downloadinfo.
411 di->curle = qcurl_easy_init();
412 qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
413 qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
414 qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
415 qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos);
416 qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1);
417 qcurl_easy_setopt(di->curle, CURLOPT_WRITEFUNCTION, CURL_fwrite);
418 qcurl_easy_setopt(di->curle, CURLOPT_WRITEDATA, (void *) di);
419 qcurl_easy_setopt(di->curle, CURLOPT_PRIVATE, (void *) di);
420 qcurl_multi_add_handle(curlm, di->curle);
// Stop scanning once the concurrency limit is reached.
423 if(numdownloads >= cl_curl_maxdownloads.integer)
/*
434 this function MUST be called before using anything else in this file.
435 On Win32, this must be called AFTER WSAStartup has been done!
*/
// (Curl_Init header elided in this listing.) Initializes libcurl without
// extra subsystems and creates the shared multi handle.
443 qcurl_global_init(CURL_GLOBAL_NOTHING);
444 curlm = qcurl_multi_init();
/*
451 Surprise... closes all the stuff. Please do this BEFORE shutting down LHNET.
*/
// Forward declaration used by the shutdown path below.
454 void Curl_ClearRequirements();
// (Curl_Shutdown body partially elided.) Drops the per-map requirement
// list as part of teardown.
459 Curl_ClearRequirements();
/*
469 Finds the internal information block for a download given by file name.
*/
// Case-insensitive match against each list entry; returns NULL when not
// found (return statements elided in this listing).
472 static downloadinfo *Curl_Find(const char *filename)
477 for(di = downloads; di; di = di->next)
478 if(!strcasecmp(di->filename, filename))
/*
487 Starts a download of a given URL to the file name portion of this URL (or name
488 if given) in the "dlcache/" folder.
*/
// Queues (does not immediately start) a download; CheckPendingDownloads
// later activates it. 'ispak' requests mounting the file as a package on
// completion; 'forthismap' requests disconnect-on-failure semantics.
491 void Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
502 // Note: This extraction of the file name portion is NOT entirely correct.
504 // It does the following:
506 // http://host/some/script.cgi/SomeFile.pk3?uid=ABCDE -> SomeFile.pk3
507 // http://host/some/script.php?uid=ABCDE&file=/SomeFile.pk3 -> SomeFile.pk3
508 // http://host/some/script.php?uid=ABCDE&file=SomeFile.pk3 -> script.php
510 // However, I'd like to keep this "buggy" behavior so that PHP script
511 // authors can write download scripts without having to enable
512 // AcceptPathInfo on Apache. They just have to ensure that their script
513 // can be called with such a "fake" path name like
514 // http://host/some/script.php?uid=ABCDE&file=/SomeFile.pk3
516 // By the way, such PHP scripts should either send the file or a
517 // "Location:" redirect; PHP code example:
519 // header("Location: http://www.example.com/");
521 // By the way, this will set User-Agent to something like
522 // "Nexuiz build 22:27:55 Mar 17 2006" (engineversion) and Referer to
523 // dp://serverhost:serverport/ so you can filter on this; an example
524 // httpd log file line might be:
526 // 141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
// Extract the last path component up to an optional '?' (the query
// separator; 'q' is located by a strchr elided in this listing) and
// build the cache path "dlcache/<basename>".
530 p = strrchr(name, '/');
531 p = p ? (p+1) : name;
533 length = q ? (size_t)(q - p) : strlen(p);
534 dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
537 // already downloading the file?
539 downloadinfo *di = Curl_Find(fn);
542 Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
544 // however, if it was not for this map yet...
// Upgrade an existing download to "required for this map".
546 di->forthismap = true;
// If the pak already exists locally, try to just mount it instead of
// downloading again.
552 if(ispak && FS_FileExists(fn))
554 qboolean already_loaded;
555 if(FS_AddPack(fn, &already_loaded, true))
557 Con_DPrintf("%s already exists, not downloading!\n", fn);
559 Con_DPrintf("(pak was already loaded)\n");
// Sanity-check a partially downloaded file: only resume if its first
// four bytes look like a ZIP ("PK\x03\x04") or Quake PACK header;
// otherwise truncate and start over.
567 qfile_t *f = FS_Open(fn, "rb", false, false);
571 FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
573 if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
575 Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
577 f = FS_Open(fn, "w", false, false);
// Allocate and initialize the queue entry, then push it onto the head
// of the download list (remaining field initializers elided).
590 di = (downloadinfo *) Z_Malloc(sizeof(*di));
591 strlcpy(di->filename, fn, sizeof(di->filename));
592 strlcpy(di->url, URL, sizeof(di->url));
593 dpsnprintf(di->referer, sizeof(di->referer), "dp://%s/", cls.netcon ? cls.netcon->address : "notconnected.invalid");
594 di->forthismap = forthismap;
600 di->bytes_received = 0;
601 di->next = downloads;
/*
614 call this regularly as this will always download as much as possible without
*/
// Per-frame pump: performs pending transfer work, reaps finished
// transfers, starts queued downloads, and schedules the next run so the
// cl_curl_maxspeed cap is honored.
620 if(!cl_curl_enabled.integer)
622 Con_Print("curl support not enabled. Set cl_curl_enabled to 1 to enable.\n");
// Skip this frame entirely if the throttle window has not elapsed yet.
632 if(realtime < curltime) // throttle
// Drive libcurl until it stops asking for an immediate re-invocation.
641 mc = qcurl_multi_perform(curlm, &remaining);
643 while(mc == CURLM_CALL_MULTI_PERFORM);
// Drain completion messages; CURLOPT_PRIVATE recovers the downloadinfo
// for each finished easy handle.
647 CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
650 if(msg->msg == CURLMSG_DONE)
653 CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
656 qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
657 result = msg->data.result;
660 failed = CURL_DOWNLOAD_FAILED;
// On a "successful" transfer, still inspect the HTTP status code; the
// comparison deciding SERVERERROR is elided in this listing.
665 qcurl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &code);
670 failed = CURL_DOWNLOAD_SERVERERROR;
676 Curl_EndDownload(di, failed, result);
681 CheckPendingDownloads();
683 // when will we curl the next time?
684 // we will wait a bit to ensure our download rate is kept.
685 // we now know that realtime >= curltime... so set up a new curltime
686 if(cl_curl_maxspeed.value > 0)
688 unsigned long bytes = bytes_received; // maybe smoothen a bit?
// Delay the next run long enough that 'bytes' at maxspeed KiB/s fits
// into the elapsed window, then consume those bytes from the counter.
689 curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
690 bytes_received -= bytes;
// Aborts every download; Curl_EndDownload unlinks the list head each
// iteration (loop construct elided in this listing).
703 void Curl_CancelAll()
710 Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK);
711 // INVARIANT: downloads will point to the next download after that!
/*
719 returns true iff there is a download running.
*/
722 qboolean Curl_Running()
727 return downloads != NULL;
/*
732 Curl_GetDownloadAmount
734 returns a value from 0.0 to 1.0 which represents the downloaded amount of data
735 for the given download.
*/
738 static double Curl_GetDownloadAmount(downloadinfo *di)
// NOTE(review): CURLINFO_CONTENT_LENGTH_DOWNLOAD is -1 when the server
// sent no Content-Length; the guard handling length <= 0 before this
// division is elided in this listing -- confirm it exists.
745 qcurl_easy_getinfo(di->curle, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &length);
747 return di->bytes_received / length;
/*
757 Curl_GetDownloadSpeed
759 returns the speed of the given download in bytes per second
*/
762 static double Curl_GetDownloadSpeed(downloadinfo *di)
769 qcurl_easy_getinfo(di->curle, CURLINFO_SPEED_DOWNLOAD, &speed);
/*
780 prints the download list
*/
783 // TODO rewrite using Curl_GetDownloadInfo?
// Console command handler for "curl --info": lists each download with
// progress and speed (active) or "(queued)".
784 static void Curl_Info_f()
791 Con_Print("Currently running downloads:\n");
792 for(di = downloads; di; di = di->next)
794 double speed, percent;
795 Con_Printf(" %s -> %s ", di->url, di->filename);
796 percent = 100.0 * Curl_GetDownloadAmount(di);
797 speed = Curl_GetDownloadSpeed(di);
799 Con_Printf("(%.1f%% @ %.1f KiB/s)\n", percent, speed / 1024.0);
801 Con_Print("(queued)\n");
806 Con_Print("No downloads running.\n");
/*
814 implements the "curl" console command
818 curl --cancel filename
823 curl [--pak] [--forthismap] [--for filename filename...] url
824 --pak: after downloading, load the package into the virtual file system
825 --for filename...: only download if at least one of the named files is missing
826 --forthismap: disconnect on failure
*/
// Option parser for the "curl" command; the last argument is treated as
// the URL, everything before it as flags.
829 void Curl_Curl_f(void)
833 qboolean pak = false;
834 qboolean forthismap = false;
836 const char *name = 0;
838 if(!cl_curl_enabled.integer)
// Echo the full command line to the developer console.
844 for(i = 0; i != Cmd_Argc(); ++i)
845 Con_DPrintf("%s ", Cmd_Argv(i));
850 Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
// The URL is always the final argument; flags occupy argv[1..end).
854 url = Cmd_Argv(Cmd_Argc() - 1);
857 for(i = 1; i != end; ++i)
859 const char *a = Cmd_Argv(i);
860 if(!strcmp(a, "--info"))
865 else if(!strcmp(a, "--cancel"))
// "--cancel" with no filename cancels everything; with a filename
// (which then sits in 'url') it ends just that one download.
867 if(i == end - 1) // last argument
871 downloadinfo *di = Curl_Find(url);
872 Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
876 else if(!strcmp(a, "--pak"))
880 else if(!strcmp(a, "--for"))
// Consume file names until the URL; bail to the download path as soon
// as one of them is missing.
882 for(i = i + 1; i != end - 1; ++i)
884 if(!FS_FileExists(Cmd_Argv(i)))
885 goto needthefile; // why can't I have a "double break"?
887 // if we get here, we have all the files...
890 else if(!strcmp(a, "--forthismap"))
// "--as" names the local file explicitly (consumes the next argument;
// that consumption is elided in this listing).
894 else if(!strcmp(a, "--as"))
902 else if(!strcmp(a, "--clear_autodownload"))
904 // mark all running downloads as "not for this map", so if they
905 // fail, it does not matter
906 Curl_Clear_forthismap();
909 else if(!strcmp(a, "--finish_autodownload"))
916 Con_Printf("invalid option %s\n", a);
// 'needthefile' target: actually queue the download.
922 Curl_Begin(url, name, pak, forthismap);
/*
929 loads the commands and cvars this library uses
*/
932 void Curl_Init_Commands(void)
934 Cvar_RegisterVariable (&cl_curl_enabled);
935 Cvar_RegisterVariable (&cl_curl_maxdownloads);
936 Cvar_RegisterVariable (&cl_curl_maxspeed);
937 Cvar_RegisterVariable (&sv_curl_defaulturl);
938 Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
/*
945 returns an array of Curl_downloadinfo_t structs for usage by GUIs.
946 The number of elements in the array is returned in int *nDownloads.
947 const char **additional_info may be set to a string of additional user
948 information, or to NULL if no such display shall occur. The returned
949 array must be freed later using Z_Free.
*/
952 Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info)
956 Curl_downloadinfo_t *downinfo;
// Static buffer backing *additional_info; persists across calls, so the
// caller must not free it.
957 static char addinfo[128];
963 *additional_info = NULL;
// First pass: count downloads (counting statement elided), then allocate
// one snapshot entry per download.
968 for(di = downloads; di; di = di->next)
971 downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * n);
// Second pass: fill in per-download snapshot; active downloads report
// progress/speed, queued ones only set the queued flag.
973 for(di = downloads; di; di = di->next)
975 strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
978 downinfo[i].progress = Curl_GetDownloadAmount(di);
979 downinfo[i].speed = Curl_GetDownloadSpeed(di);
980 downinfo[i].queued = false;
984 downinfo[i].queued = true;
991 // TODO put something better here?
992 // maybe... check if the file is actually needed for the current map?
993 if(Curl_Have_forthismap())
995 dpsnprintf(addinfo, sizeof(addinfo), "please wait for the download to complete");
996 *additional_info = addinfo;
999 *additional_info = NULL;
/*
1008 ====================
1011 finds the URL where to find a given package.
1013 For this, it reads a file "curl_urls.txt" of the following format:
1016 revdm*.pk3 http://revdm/downloads/are/here/
1017 * http://any/other/stuff/is/here/
1019 The URLs should end in /. If not, downloads will still work, but the cached files
1020 can't be just put into the data directory with the same download configuration
1021 (you might want to do this if you want to tag downloaded files from your
1022 server, but you should not). "-" means "don't download".
1024 If no single pattern matched, the cvar sv_curl_defaulturl is used as download
1027 Note: pak1.pak and data*.pk3 are excluded from autodownload at another point in
1028 this file for obvious reasons.
1029 ====================
*/
1031 static const char *Curl_FindPackURL(const char *filename)
// Result buffer is static: the returned pointer stays valid after return
// but is overwritten by the next call.
1033 static char foundurl[256];
1034 fs_offset_t filesize;
1035 char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
1038 // read lines of format "pattern url"
// Tokenizer state for the current line: [pattern, patternend) and
// [url, urlend) spans inside 'buf'.
1040 char *pattern = NULL, *patternend = NULL, *url = NULL, *urlend = NULL;
1041 qboolean eof = false;
// A complete "pattern url" pair was parsed: test it against 'filename'.
1053 if(pattern && url && patternend)
1059 if(matchpattern(filename, pattern, true))
1061 strlcpy(foundurl, url, sizeof(foundurl));
// Token-boundary bookkeeping: close off whichever span is still open
// (bodies elided in this listing).
1073 if(pattern && !patternend)
1075 else if(url && !urlend)
1081 else if(pattern && patternend && !url)
// No pattern matched: fall back to the configured default URL.
1090 return sv_curl_defaulturl.string;
// Singly linked list node for one file required by the current map.
1093 typedef struct requirement_s
1095 struct requirement_s *next;
1096 char filename[MAX_QPATH];
1099 static requirement *requirements = NULL;
/*
1103 ====================
1104 Curl_ClearRequirements
1106 Clears the list of required files for playing on the current map.
1107 This should be called at every map change.
1108 ====================
*/
// Pops and frees every node of the requirements list (loop header and
// free call elided in this listing).
1110 void Curl_ClearRequirements()
1114 requirement *req = requirements;
1115 requirements = requirements->next;
/*
1121 ====================
1124 Adds the given file to the list of requirements.
1125 ====================
*/
// Pushes a new node onto the head of the requirements list.
1127 void Curl_RequireFile(const char *filename)
1129 requirement *req = (requirement *) Z_Malloc(sizeof(*requirements));
1130 req->next = requirements;
1131 strlcpy(req->filename, filename, sizeof(req->filename));
/*
1136 ====================
1137 Curl_SendRequirements
1139 Makes the current host_client download all files he needs.
1140 This is done by sending him the following console commands:
1142 curl --start_autodownload
1143 curl --pak --for maps/pushmoddm1.bsp --forthismap http://where/this/darn/map/is/pushmoddm1.pk3
1144 curl --finish_autodownload
1145 ====================
*/
1147 void Curl_SendRequirements()
1149 // for each requirement, find the pack name
// All commands are accumulated into one buffer and sent in a single
// Host_ClientCommands call; on overflow nothing is sent at all.
1150 char sendbuffer[4096] = "";
1153 strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
1155 for(req = requirements; req; req = req->next)
// Which pack (if any) currently provides this file; entries with no
// pack, or an unusable pack name, are skipped (checks elided).
1158 const char *thispack = FS_WhichPack(req->filename);
1159 const char *packurl;
// Strip the directory part of the pack path (result assignment elided).
1164 p = strrchr(thispack, '/');
1168 packurl = Curl_FindPackURL(thispack);
// "-" is the configured sentinel for "do not download this pack".
1170 if(packurl && *packurl && strcmp(packurl, "-"))
// Emit: curl --pak --forthismap --as <pack> --for <file> <url><pack>
1172 strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
1173 strlcat(sendbuffer, thispack, sizeof(sendbuffer));
1174 strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
1175 strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
1176 strlcat(sendbuffer, " ", sizeof(sendbuffer));
1177 strlcat(sendbuffer, packurl, sizeof(sendbuffer));
1178 strlcat(sendbuffer, thispack, sizeof(sendbuffer));
1179 strlcat(sendbuffer, "\n", sizeof(sendbuffer));
1183 strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
// strlcat truncates silently, so verify the final length before sending.
1185 if(strlen(sendbuffer) + 1 < sizeof(sendbuffer))
1186 Host_ClientCommands("%s", sendbuffer);
1188 Con_Printf("Could not initiate autodownload due to URL buffer overflow\n");