X-Git-Url: http://git.xonotic.org/?a=blobdiff_plain;f=libcurl.c;h=3d8de2b0ac158b871dbf43964d79ea2fa1f99ccc;hb=1857323bffc66bee0eadc20a7ae46889837dcf1b;hp=b228350edfd385f7501ec4d174aed741f5f91773;hpb=85c434c08985337fa476e56fae0fde3e955dbd73;p=xonotic%2Fdarkplaces.git

diff --git a/libcurl.c b/libcurl.c
index b228350e..3d8de2b0 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -7,14 +7,14 @@
 #include "jpeg.h"
 #include "image_png.h"
 
-static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
-static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
-static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
-static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
-static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
-static cvar_t cl_curl_useragent = {0, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
-static cvar_t cl_curl_useragent_append = {0, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
+static cvar_t cl_curl_maxdownloads = {CVAR_CLIENT | CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
+static cvar_t cl_curl_maxspeed = {CVAR_CLIENT | CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
+static cvar_t sv_curl_defaulturl = {CVAR_SERVER | CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
+static cvar_t sv_curl_serverpackages = {CVAR_SERVER | CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CVAR_SERVER | CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
+static cvar_t cl_curl_enabled = {CVAR_CLIENT | CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
+static cvar_t cl_curl_useragent = {CVAR_CLIENT, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
+static cvar_t cl_curl_useragent_append = {CVAR_CLIENT, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
 
 /*
 =================================================================
@@ -331,16 +331,16 @@ static void Curl_CheckCommandWhenDone(void)
 	if(numdownloads_fail == 0)
 	{
 		Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
-		Cbuf_AddText("\n");
-		Cbuf_AddText(command_when_done);
-		Cbuf_AddText("\n");
+		Cbuf_AddText(&cmd_client, "\n");
+		Cbuf_AddText(&cmd_client, command_when_done);
+		Cbuf_AddText(&cmd_client, "\n");
 	}
 	else
 	{
 		Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
-		Cbuf_AddText("\n");
-		Cbuf_AddText(command_when_error);
-		Cbuf_AddText("\n");
+		Cbuf_AddText(&cmd_client, "\n");
+		Cbuf_AddText(&cmd_client, command_when_error);
+		Cbuf_AddText(&cmd_client, "\n");
 	}
 	Curl_Clear_forthismap();
 }
@@ -598,7 +598,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 
 		pixels = decode_image(di, content_type);
 		if(pixels)
-			Draw_NewPic(p, image_width, image_height, true, pixels);
+			Draw_NewPic(p, image_width, image_height, pixels, TEXTYPE_BGRA, TEXF_ALPHA | TEXF_CLAMP);
 		else
 			CLEAR_AND_RETRY();
 	}
@@ -615,7 +615,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 
 		pixels = decode_image(di, content_type);
 		if(pixels)
-			R_SkinFrame_LoadInternalBGRA(p, TEXF_FORCE_RELOAD | TEXF_MIPMAP | TEXF_ALPHA, pixels, image_width, image_height, false); // TODO what sRGB argument to put here?
+			R_SkinFrame_LoadInternalBGRA(p, TEXF_FORCE_RELOAD | TEXF_MIPMAP | TEXF_ALPHA, pixels, image_width, image_height, 0, 0, 0, false); // TODO what sRGB argument to put here?
 		else
 			CLEAR_AND_RETRY();
 	}
@@ -966,20 +966,21 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 
 		// already downloading the file?
 		{
-			downloadinfo *di = Curl_Find(fn);
-			if(di)
+			downloadinfo *existingdownloadinfo = Curl_Find(fn);
+			if(existingdownloadinfo)
 			{
-				Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url, urlbuf, sizeof(urlbuf)));
+				Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(existingdownloadinfo->url, urlbuf, sizeof(urlbuf)));
 
 				// however, if it was not for this map yet...
-				if(forthismap && !di->forthismap)
+				if(forthismap && !existingdownloadinfo->forthismap)
 				{
-					di->forthismap = true;
+					existingdownloadinfo->forthismap = true;
 					// this "fakes" a download attempt so the client will wait for
 					// the download to finish and then reconnect
 					++numdownloads_added;
 				}
 
+				if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 				return false;
 			}
 		}
@@ -1003,6 +1004,7 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 			}
 		}
 
+		if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 		return false;
 	}
 	else
@@ -1010,10 +1012,10 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max
 		qfile_t *f = FS_OpenRealFile(fn, "rb", false);
 		if(f)
 		{
-			char buf[4] = {0};
-			FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
+			char b[4] = {0};
+			FS_Read(f, b, sizeof(b)); // no "-1", I will use memcmp
 
-			if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
+			if(memcmp(b, "PK\x03\x04", 4) && memcmp(b, "PACK", 4))
 			{
 				Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
 				FS_Close(f);
@@ -1331,7 +1333,7 @@ prints the download list
 ====================
 */
 // TODO rewrite using Curl_GetDownloadInfo?
-static void Curl_Info_f(void)
+static void Curl_Info_f(cmd_state_t *cmd)
 {
 	downloadinfo *di;
 	char urlbuf[1024];
@@ -1386,7 +1388,7 @@ curl --finish_autodownload
 once the last download completes successfully, reconnect to the current server
 ====================
 */
-static void Curl_Curl_f(void)
+static void Curl_Curl_f(cmd_state_t *cmd)
 {
 	double maxspeed = 0;
 	int i;
@@ -1408,21 +1410,21 @@ static void Curl_Curl_f(void)
 		return;
 	}
 
-	if(Cmd_Argc() < 2)
+	if(Cmd_Argc(cmd) < 2)
 	{
 		Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
 		return;
 	}
 
-	url = Cmd_Argv(Cmd_Argc() - 1);
-	end = Cmd_Argc();
+	url = Cmd_Argv(cmd, Cmd_Argc(cmd) - 1);
+	end = Cmd_Argc(cmd);
 
 	for(i = 1; i != end; ++i)
 	{
-		const char *a = Cmd_Argv(i);
+		const char *a = Cmd_Argv(cmd, i);
 		if(!strcmp(a, "--info"))
 		{
-			Curl_Info_f();
+			Curl_Info_f(cmd);
 			return;
 		}
 		else if(!strcmp(a, "--cancel"))
@@ -1455,7 +1457,7 @@ static void Curl_Curl_f(void)
 		{
 			for(i = i + 1; i != end - 1; ++i)
 			{
-				if(!FS_FileExists(Cmd_Argv(i)))
+				if(!FS_FileExists(Cmd_Argv(cmd, i)))
 					goto needthefile; // why can't I have a "double break"?
 			}
 			// if we get here, we have all the files...
@@ -1470,7 +1472,7 @@ static void Curl_Curl_f(void)
 			if(i < end - 1)
 			{
 				++i;
-				name = Cmd_Argv(i);
+				name = Cmd_Argv(cmd, i);
 			}
 		}
 		else if(!strcmp(a, "--clear_autodownload"))
@@ -1524,10 +1526,10 @@ static void curl_curlcat_callback(int code, size_t length_received, unsigned cha
 	Z_Free(buffer);
 }
 
-void Curl_CurlCat_f(void)
+void Curl_CurlCat_f(cmd_state_t *cmd)
 {
 	unsigned char *buf;
-	const char *url = Cmd_Argv(1);
+	const char *url = Cmd_Argv(cmd, 1);
 	buf = Z_Malloc(16384);
 	Curl_Begin_ToMemory(url, buf, 16384, curl_curlcat_callback, NULL);
 }
@@ -1550,8 +1552,9 @@ void Curl_Init_Commands(void)
 	Cvar_RegisterVariable (&sv_curl_maxspeed);
 	Cvar_RegisterVariable (&cl_curl_useragent);
 	Cvar_RegisterVariable (&cl_curl_useragent_append);
-	Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
-	//Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
+	Cmd_AddCommand(&cmd_client, "curl", Curl_Curl_f, "download data from an URL and add to search path");
+	Cmd_AddCommand(&cmd_clientfromserver, "curl", Curl_Curl_f, "download data from an URL and add to search path");
+	//Cmd_AddCommand(&cmd_client, "curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
 
 /*
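
Note on the recurring API change in this diff: console commands, the command buffer and argument access are now bound to an explicit interpreter (cmd_state_t, e.g. cmd_client or cmd_clientfromserver), and cvars declare which program owns them via CVAR_CLIENT / CVAR_SERVER in addition to CVAR_SAVE. The following is only an illustrative sketch of that pattern, assembled from the signatures visible in the hunks above; Example_f, cl_example and Example_Init are hypothetical names and are not part of this commit.

// Hypothetical sketch, not from this commit: the post-refactor registration pattern.
static cvar_t cl_example = {CVAR_CLIENT | CVAR_SAVE, "cl_example", "1", "illustrative client-side cvar"};

static void Example_f(cmd_state_t *cmd)
{
	// Argument access goes through the interpreter state that dispatched the command.
	if(Cmd_Argc(cmd) < 2)
	{
		Con_Print("usage: example <url>\n");
		return;
	}
	Con_Printf("example got: %s\n", Cmd_Argv(cmd, 1));
}

void Example_Init(void)
{
	Cvar_RegisterVariable(&cl_example);
	// Register against a specific interpreter, mirroring the "curl" registration in the
	// last hunk (client console, plus cmd_clientfromserver for commands stuffed by the server).
	Cmd_AddCommand(&cmd_client, "example", Example_f, "illustrative command");
}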
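
The two added Thread_UnlockMutex lines in the Curl_Begin hunks close early-return paths that previously left curl_mutex held. A reduced sketch of that pattern follows; Example_Begin is a hypothetical function, and the matching Thread_LockMutex call is assumed from the surrounding libcurl.c code rather than shown in these hunks.

// Hypothetical sketch: every early return taken while curl_mutex is held must
// release it, which is what the two added lines in the hunks above do.
static qboolean Example_Begin(const char *fn)
{
	if (curl_mutex) Thread_LockMutex(curl_mutex); // assumed counterpart of the unlock calls

	if (Curl_Find(fn))
	{
		// early exit: drop the lock before bailing out
		if (curl_mutex) Thread_UnlockMutex(curl_mutex);
		return false;
	}

	// ... queue the download here ...

	if (curl_mutex) Thread_UnlockMutex(curl_mutex);
	return true;
}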