cvar_t scr_screenshot_gammaboost = {CVAR_SAVE, "scr_screenshot_gammaboost","1", "gamma correction on saved screenshots and videos, 1.0 saves unmodified images"};
// scr_screenshot_name is defined in fs.c
// NOTE: description fixed — the original text had an unmatched ")" after "output"
cvar_t cl_capturevideo = {0, "cl_capturevideo", "0", "enables saving of video to a .avi file using uncompressed I420 colorspace and PCM audio (note that scr_screenshot_gammaboost affects the brightness of the output)"};
// 0 = use the current video mode dimension (see SCR_CaptureVideo_BeginVideo)
cvar_t cl_capturevideo_width = {0, "cl_capturevideo_width", "0", "scales all frames to this resolution before saving the video"};
cvar_t cl_capturevideo_height = {0, "cl_capturevideo_height", "0", "scales all frames to this resolution before saving the video"};
cvar_t cl_capturevideo_realtime = {0, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over 1 second"};
cvar_t cl_capturevideo_fps = {0, "cl_capturevideo_fps", "30", "how many frames per second to save (29.97 for NTSC, 30 for typical PC video, 15 can be useful)"};
cvar_t cl_capturevideo_number = {CVAR_SAVE, "cl_capturevideo_number", "1", "number to append to video filename, incremented each time a capture begins"};
cvar_t scr_zoomwindow_fov = {CVAR_SAVE, "scr_zoomwindow_fov", "20", "fov of zoom window"};
cvar_t scr_stipple = {0, "scr_stipple", "0", "interlacing-like stippling of the display"};
cvar_t scr_refresh = {0, "scr_refresh", "1", "allows you to completely shut off rendering for benchmarking purposes"};
cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information, 0 = off, 1 = show client netgraph, 2 = show client and server netgraphs (when hosting a server)"};
// set at startup depending on whether libjpeg support is available — TODO confirm (set elsewhere)
int jpeg_supported = false;
void SCR_DrawCenterString (void)
{
char *start;
- int l;
int x, y;
int remaining;
int color;
+ if(cl.intermission == 2) // in finale,
+ if(sb_showscores) // make TAB hide the finale message (sb_showscores overrides finale in sbar.c)
+ return;
+
// the finale prints the characters one at a time, except if printspeed is an absurdly high value
if (cl.intermission && scr_printspeed.value > 0 && scr_printspeed.value < 1000000)
remaining = (int)(scr_printspeed.value * (cl.time - scr_centertime_start));
do
{
// scan the number of characters on the line, not counting color codes
- int chars = 0;
- for (l=0 ; l<vid_conwidth.integer/8 ; l++)
- {
- if (start[l] == '\n' || !start[l])
- break;
- // color codes add no visible characters, so don't count them
- if (start[l] == STRING_COLOR_TAG && (start[l+1] >= '0' && start[l+1] <= '9'))
- l++;
- else
- chars++;
- }
+ char *newline = strchr(start, '\n');
+ int l = newline ? (newline - start) : (int)strlen(start);
+ int chars = COM_StringLengthNoColors(start, l, NULL);
+
x = (vid_conwidth.integer - chars*8)/2;
if (l > 0)
{
if (remaining < l)
l = remaining;
- DrawQ_ColoredString(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color);
+ DrawQ_String(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color, false);
remaining -= l;
if (remaining <= 0)
return;
}
-
y += 8;
- while (*start && *start != '\n')
- start++;
-
- if (!*start)
+ if (!newline)
break;
- start++; // skip the \n
+ start = newline + 1; // skip the \n
} while (1);
}
SCR_DrawCenterString ();
}
+void SCR_DrawNetGraph_DrawGraph (int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *label, float textsize, int packetcounter, int numparameters, const int **parameters, const float parametercolors[][4])
+{
+ int j, k, x, y, index, offset, height;
+ // draw the bar graph itself
+ // advance the packet counter because it is the latest packet column being
+ // built up and should come last
+ packetcounter = (packetcounter + 1) % NETGRAPH_PACKETS;
+ for (j = 0;j < NETGRAPH_PACKETS;j++)
+ {
+ x = graphx + j * barwidth;
+ y = graphy + barheight;
+ index = (packetcounter + j) % NETGRAPH_PACKETS;
+ if (parameters[0][index] == NETGRAPH_LOSTPACKET)
+ DrawQ_Fill(x, y - barheight, barwidth, barheight, 1, 0, 0, 1, 0);
+ else if (parameters[0][index] == NETGRAPH_CHOKEDPACKET)
+ DrawQ_Fill(x, y - min(2, barheight), barwidth, min(2, barheight), 1, 1, 0, 1, 0);
+ else
+ {
+ offset = 0;
+ for (k = 0;k < numparameters;k++)
+ {
+ height = (parameters[k][index] + bardivide - 1) / bardivide;
+ height = min(height, barheight - offset);
+ offset += height;
+ if (height)
+ DrawQ_Fill(x, y - offset, barwidth, height, parametercolors[k][0], parametercolors[k][1], parametercolors[k][2], parametercolors[k][3], 0);
+ }
+ }
+ }
+}
+
+// RGBA colors for the three stacked bars of each packet column, in the
+// order the parameters[] arrays are filled by the
+// SCR_DrawNetGraph_DrawConnection_* functions:
+// [0] unreliable = orange, [1] reliable = white, [2] ack = green
+const float netgraphcolors[3][4] =
+{
+ {1 , 0.5, 0 , 1},
+ {1 , 1 , 1 , 1},
+ {0 , 1 , 0 , 1},
+};
+
+void SCR_DrawNetGraph_DrawConnection_Client (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labelincoming, int separator, const char *labeloutgoing, float textsize)
+{
+ int numparameters;
+ const int *parameters[3];
+ // dim background
+ DrawQ_Fill(graphx , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+ DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+ // draw the bar graphs
+ numparameters = 3;
+ parameters[0] = conn->incoming_unreliablesize;
+ parameters[1] = conn->incoming_reliablesize;
+ parameters[2] = conn->incoming_acksize;
+ SCR_DrawNetGraph_DrawGraph(graphx, graphy, barwidth, barheight, bardivide, labelincoming, textsize, conn->incoming_packetcounter, numparameters, parameters, netgraphcolors);
+ parameters[0] = conn->outgoing_unreliablesize;
+ parameters[1] = conn->outgoing_reliablesize;
+ parameters[2] = conn->outgoing_acksize;
+ SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+ // draw labels
+ DrawQ_String(graphx , graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+ DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+}
+
+void SCR_DrawNetGraph_DrawConnection_Server (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labeloutgoing, int separator, const char *labelincoming, float textsize)
+{
+ int numparameters;
+ const int *parameters[3];
+ // dim background
+ DrawQ_Fill(graphx , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+ DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+ // draw the bar graphs
+ numparameters = 3;
+ parameters[0] = conn->outgoing_unreliablesize;
+ parameters[1] = conn->outgoing_reliablesize;
+ parameters[2] = conn->outgoing_acksize;
+ SCR_DrawNetGraph_DrawGraph(graphx , graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+ parameters[0] = conn->incoming_unreliablesize;
+ parameters[1] = conn->incoming_reliablesize;
+ parameters[2] = conn->incoming_acksize;
+ SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labelincoming, textsize, conn->incoming_packetcounter, numparameters, parameters, netgraphcolors);
+ // draw labels
+ DrawQ_String(graphx , graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+ DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+}
+
+/*
+==============
+SCR_DrawNetGraph
+==============
+*/
+void SCR_DrawNetGraph (void)
+{
+ int i, separator1, separator2, barwidth, barheight, bardivide, netgraph_x, netgraph_y, textsize, index, netgraphsperrow;
+
+ if (cls.state != ca_connected)
+ return;
+ if (!cls.netcon)
+ return;
+ if (!shownetgraph.integer)
+ return;
+
+ separator1 = 2;
+ separator2 = 4;
+ textsize = 8;
+ barwidth = 1;
+ barheight = 50;
+ bardivide = 20;
+
+ netgraphsperrow = (vid_conwidth.integer + separator2) / (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+ netgraphsperrow = max(netgraphsperrow, 1);
+
+ index = 0;
+ netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+ netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+ SCR_DrawNetGraph_DrawConnection_Client(cls.netcon, netgraph_x, netgraph_y, barwidth, barheight, bardivide, "incoming", separator1, "outgoing", textsize);
+ index++;
+
+ if (sv.active && shownetgraph.integer >= 2)
+ {
+ for (i = 0;i < svs.maxclients;i++)
+ {
+ if (!svs.clients[i].netconnection)
+ continue;
+ netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+ netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+ SCR_DrawNetGraph_DrawConnection_Server(svs.clients[i].netconnection, netgraph_x, netgraph_y, barwidth, barheight, bardivide, va("%s", svs.clients[i].name), separator1, "", textsize);
+ index++;
+ }
+ }
+}
+
/*
==============
SCR_DrawTurtle
len = (int)strlen(temp);
x = (vid_conwidth.integer - len*size) / 2;
y = vid_conheight.integer - size - offset;
- DrawQ_Pic(0, y, NULL, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
- DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0);
+ DrawQ_Fill(0, y, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
+ DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
return 8;
}
{
len = (int)strlen(addinfo);
x = (vid_conwidth.integer - len*size) / 2;
- DrawQ_Pic(0, y - size, NULL, vid_conwidth.integer, size, 1, 1, 1, 0.8, 0);
- DrawQ_String(x, y - size, addinfo, len, size, size, 0, 0, 0, 1, 0);
+ DrawQ_Fill(0, y - size, vid_conwidth.integer, size, 1, 1, 1, 0.8, 0);
+ DrawQ_String(x, y - size, addinfo, len, size, size, 0, 0, 0, 1, 0, NULL, true);
}
for(i = 0; i != nDownloads; ++i)
dpsnprintf(temp, sizeof(temp), "Downloading %s ... %5.1f%% @ %.1f KiB/s\n", downinfo[i].filename, 100.0 * downinfo[i].progress, downinfo[i].speed / 1024.0);
len = (int)strlen(temp);
x = (vid_conwidth.integer - len*size) / 2;
- DrawQ_Pic(0, y + i * size, NULL, vid_conwidth.integer, size, 0, 0, 0, 0.8, 0);
- DrawQ_String(x, y + i * size, temp, len, size, size, 1, 1, 1, 1, 0);
+ DrawQ_Fill(0, y + i * size, vid_conwidth.integer, size, 0, 0, 0, 0.8, 0);
+ DrawQ_String(x, y + i * size, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
}
Z_Free(downinfo);
Log_Start();
Host_StartVideo();
- S_StopAllSounds();
SCR_UpdateLoadingScreen(false);
}
lines++;
y = vid_conheight.integer - sb_lines - lines * 8;
i = j = 0;
- DrawQ_Pic(0, y, NULL, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
+ DrawQ_Fill(0, y, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
while (r_speeds_string[i])
{
j = i;
while (r_speeds_string[i] && r_speeds_string[i] != '\n')
i++;
if (i - j > 0)
- DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0);
+ DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0, NULL, true);
if (r_speeds_string[i] == '\n')
i++;
y += 8;
Cvar_SetValue ("viewsize",scr_viewsize.value-10);
}
+// forward declaration: SCR_CaptureVideo_EndVideo is defined later in this file
+void SCR_CaptureVideo_EndVideo(void);
+// Shutdown hook for the client screen module: ends any in-progress video
+// capture so the capture file is finished cleanly rather than left open
+void CL_Screen_Shutdown(void)
+{
+ SCR_CaptureVideo_EndVideo();
+}
+
void CL_Screen_Init(void)
{
Cvar_RegisterVariable (&scr_fov);
Cvar_RegisterVariable (&scr_screenshot_jpeg_quality);
Cvar_RegisterVariable (&scr_screenshot_gammaboost);
Cvar_RegisterVariable (&cl_capturevideo);
+ Cvar_RegisterVariable (&cl_capturevideo_width);
+ Cvar_RegisterVariable (&cl_capturevideo_height);
Cvar_RegisterVariable (&cl_capturevideo_realtime);
Cvar_RegisterVariable (&cl_capturevideo_fps);
Cvar_RegisterVariable (&cl_capturevideo_number);
Cvar_RegisterVariable(&scr_zoomwindow_fov);
Cvar_RegisterVariable(&scr_stipple);
Cvar_RegisterVariable(&scr_refresh);
+ Cvar_RegisterVariable(&shownetgraph);
Cmd_AddCommand ("sizeup",SCR_SizeUp_f, "increase view size (increases viewsize cvar)");
Cmd_AddCommand ("sizedown",SCR_SizeDown_f, "decrease view size (decreases viewsize cvar)");
MSG_WriteLong(&cls.capturevideo.riffindexbuffer, chunksize);
}
+// Close out the RIFF chunk currently being written (a long AVI capture is
+// a sequence of RIFF chunks, each limited to 1GB for player compatibility).
+// final is true when capture is ending, false at a 1GB rollover; the legacy
+// idx1 index is only valid for the first RIFF chunk, so it is written only
+// when ending capture while still inside that first chunk.
-static void SCR_CaptureVideo_RIFF_Finish(void)
+static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
{
// close the "movi" list
SCR_CaptureVideo_RIFF_Pop();
// write the idx1 chunk that we've been building while saving the frames
+if(final && cls.capturevideo.videofile_firstchunkframes_offset)
+// TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
+{
+SCR_CaptureVideo_RIFF_Push("idx1", NULL);
+SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
+SCR_CaptureVideo_RIFF_Pop();
+}
cls.capturevideo.riffindexbuffer.cursize = 0;
// pop the RIFF chunk itself
while (cls.capturevideo.riffstacklevel > 0)
SCR_CaptureVideo_RIFF_Pop();
SCR_CaptureVideo_RIFF_Flush();
+// if this closes the first RIFF chunk, seek back and patch the
+// frame-count header field (written as 0 when the header was laid down),
+// then return to the end of the file; clearing the offset marks it done
+if(cls.capturevideo.videofile_firstchunkframes_offset)
+{
+Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
+FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET);
+SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+SCR_CaptureVideo_RIFF_Flush();
+FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+cls.capturevideo.videofile_firstchunkframes_offset = 0;
+}
+else
+Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
}
static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
{
- fs_offset_t cursize;
+ fs_offset_t cursize, curfilesize;
if (cls.capturevideo.riffstacklevel != 2)
Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
// check where we are in the file
SCR_CaptureVideo_RIFF_Flush();
cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
+ curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
+
// if this would overflow the windows limit of 1GB per RIFF chunk, we need
// to close the current RIFF chunk and open another for future frames
if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 > 1<<30)
{
- SCR_CaptureVideo_RIFF_Finish();
+ SCR_CaptureVideo_RIFF_Finish(false);
// begin a new 1GB extended section of the AVI
SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX");
SCR_CaptureVideo_RIFF_Push("LIST", "movi");
}
}
+static void FindFraction(double val, int *num, int *denom, int denomMax)
+{
+ int i;
+ double bestdiff;
+ // initialize
+ bestdiff = fabs(val);
+ *num = 0;
+ *denom = 1;
+
+ for(i = 1; i <= denomMax; ++i)
+ {
+ int inum = floor(0.5 + val * i);
+ double diff = fabs(val - inum / (double)i);
+ if(diff < bestdiff)
+ {
+ bestdiff = diff;
+ *num = inum;
+ *denom = i;
+ }
+ }
+}
+
void SCR_CaptureVideo_BeginVideo(void)
{
- double gamma, g;
- int width = vid.width, height = vid.height, x;
+ double gamma, g, aspect;
+ int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
+ int n, d;
unsigned int i;
if (cls.capturevideo.active)
return;
memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
// soundrate is figured out on the first SoundFrame
+
+ if(width == 0 && height != 0)
+ width = (int) (height * (double)vid.width / ((double)vid.height * vid_pixelheight.value)); // keep aspect
+ if(width != 0 && height == 0)
+ height = (int) (width * ((double)vid.height * vid_pixelheight.value) / (double)vid.width); // keep aspect
+
+ if(width < 2 || width > vid.width) // can't scale up
+ width = vid.width;
+ if(height < 2 || height > vid.height) // can't scale up
+ height = vid.height;
+
+ aspect = vid.width / (vid.height * vid_pixelheight.value);
+
+ // ensure it's all even; if not, scale down a little
+ if(width % 1)
+ --width;
+ if(height % 1)
+ --height;
+
+ cls.capturevideo.width = width;
+ cls.capturevideo.height = height;
cls.capturevideo.active = true;
cls.capturevideo.starttime = realtime;
cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1000);
cls.capturevideo.frame = 0;
cls.capturevideo.soundsampleframe = 0;
cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
- cls.capturevideo.buffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * (3+3+3) + 18);
+ cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * 3);
+ cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * (3+3+3) + 18);
gamma = 1.0/scr_screenshot_gammaboost.value;
dpsnprintf(cls.capturevideo.basename, sizeof(cls.capturevideo.basename), "video/dpvideo%03i", cl_capturevideo_number.integer);
Cvar_SetValueQuick(&cl_capturevideo_number, cl_capturevideo_number.integer + 1);
SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
- cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
+ cls.capturevideo.videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
SCR_CaptureVideo_RIFF_Write32(0); // total frames
SCR_CaptureVideo_RIFF_Write32(0); // initial frames
if (cls.capturevideo.soundrate)
SCR_CaptureVideo_RIFF_Write16(0); // language
SCR_CaptureVideo_RIFF_Write32(0); // initial frames
// find an ideal divisor for the framerate
- for (x = 1;x < 1000;x++)
- if (cls.capturevideo.framerate * x == floor(cls.capturevideo.framerate * x))
- break;
- SCR_CaptureVideo_RIFF_Write32(x); // samples/second divisor
- SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.framerate * x)); // samples/second multiplied by divisor
+ FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
+ SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
+ SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
SCR_CaptureVideo_RIFF_Write32(0); // start
- cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+ cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
SCR_CaptureVideo_RIFF_Write32(0); // length
SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
SCR_CaptureVideo_RIFF_Write32(0); // quality
SCR_CaptureVideo_RIFF_Write32(0); // color used
SCR_CaptureVideo_RIFF_Write32(0); // color important
SCR_CaptureVideo_RIFF_Pop();
+ // extended format (aspect!)
+ SCR_CaptureVideo_RIFF_Push("vprp", NULL);
+ SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
+ SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
+ SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
+ SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
+ FindFraction(aspect, &n, &d, 1000);
+ SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
+ SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
+ SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
+ SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
+ SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
+ SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
+ SCR_CaptureVideo_RIFF_Write32(width); // ValidBMHeight
+ SCR_CaptureVideo_RIFF_Write32(height); // ValidBMWidth
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
+ SCR_CaptureVideo_RIFF_Pop();
SCR_CaptureVideo_RIFF_Pop();
if (cls.capturevideo.soundrate)
{
SCR_CaptureVideo_RIFF_Pop();
SCR_CaptureVideo_RIFF_Pop();
}
+
+ // extended header (for total #frames)
+ SCR_CaptureVideo_RIFF_Push("LIST", "odml");
+ SCR_CaptureVideo_RIFF_Push("dmlh", NULL);
+ cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0);
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+
// close the AVI header list
SCR_CaptureVideo_RIFF_Pop();
// software that produced this AVI video file
{
case CAPTUREVIDEOFORMAT_AVI_I420:
// close any open chunks
- SCR_CaptureVideo_RIFF_Finish();
+ SCR_CaptureVideo_RIFF_Finish(true);
// go back and fix the video frames and audio samples fields
+ Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET);
SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
SCR_CaptureVideo_RIFF_Flush();
cls.capturevideo.videofile = NULL;
}
- if (cls.capturevideo.buffer)
+ if (cls.capturevideo.screenbuffer)
{
- Mem_Free (cls.capturevideo.buffer);
- cls.capturevideo.buffer = NULL;
+ Mem_Free (cls.capturevideo.screenbuffer);
+ cls.capturevideo.screenbuffer = NULL;
+ }
+
+ if (cls.capturevideo.outbuffer)
+ {
+ Mem_Free (cls.capturevideo.outbuffer);
+ cls.capturevideo.outbuffer = NULL;
}
if (cls.capturevideo.riffindexbuffer.data)
}
}
+static void SCR_ScaleDown(unsigned char *in, int inw, int inh, unsigned char *out, int outw, int outh)
+{
+ // TODO optimize this function
+
+ int x, y;
+ float area;
+
+ // memcpy is faster than me
+ if(inw == outw && inh == outh)
+ {
+ memcpy(out, in, 3 * inw * inh);
+ return;
+ }
+
+ // otherwise: a box filter
+ area = (float)outw * (float)outh / (float)inw / (float)inh;
+ for(y = 0; y < outh; ++y)
+ {
+ float iny0 = y / (float)outh * inh; int iny0_i = floor(iny0);
+ float iny1 = (y+1) / (float)outh * inh; int iny1_i = ceil(iny1);
+ for(x = 0; x < outw; ++x)
+ {
+ float inx0 = x / (float)outw * inw; int inx0_i = floor(inx0);
+ float inx1 = (x+1) / (float)outw * inw; int inx1_i = ceil(inx1);
+ float r = 0, g = 0, b = 0;
+ int xx, yy;
+
+ for(yy = iny0_i; yy < iny1_i; ++yy)
+ {
+ float ya = min(yy+1, iny1) - max(iny0, yy);
+ for(xx = inx0_i; xx < inx1_i; ++xx)
+ {
+ float a = ya * (min(xx+1, inx1) - max(inx0, xx));
+ r += a * in[3*(xx + inw * yy)+0];
+ g += a * in[3*(xx + inw * yy)+1];
+ b += a * in[3*(xx + inw * yy)+2];
+ }
+ }
+
+ out[3*(x + outw * y)+0] = r * area;
+ out[3*(x + outw * y)+1] = g * area;
+ out[3*(x + outw * y)+2] = b * area;
+ }
+ }
+}
+
qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
{
- int x = 0, y = 0, width = vid.width, height = vid.height;
+ int x = 0, y = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
unsigned char *in, *out;
CHECKGLERROR
//return SCR_ScreenShot(filename, cls.capturevideo.buffer, cls.capturevideo.buffer + vid.width * vid.height * 3, cls.capturevideo.buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
if (!cls.capturevideo.videofile)
return false;
// FIXME: width/height must be multiple of 2, enforce this?
- qglReadPixels (x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo.buffer);CHECKGLERROR
- in = cls.capturevideo.buffer;
- out = cls.capturevideo.buffer + width*height*3;
+ qglReadPixels (x, y, vid.width, vid.height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
+ SCR_ScaleDown (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
+ in = cls.capturevideo.outbuffer;
+ out = cls.capturevideo.outbuffer + width*height*3;
SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(width, height, in, out);
x = width*height+(width/2)*(height/2)*2;
SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
r_view.width = size;
r_view.height = size;
r_view.depth = 1;
+ r_view.useperspective = true;
r_view.frustum_x = tan(90 * M_PI / 360.0);
r_view.frustum_y = tan(90 * M_PI / 360.0);
R_UpdateVariables();
+ // Quake uses clockwise winding, so these are swapped
+ r_view.cullface_front = GL_BACK;
+ r_view.cullface_back = GL_FRONT;
+
if (cls.signon == SIGNONS)
{
float size;
// this it simply assumes the requested fov is the vertical fov
// for a 4x3 display, if the ratio is not 4x3 this makes the fov
// higher/lower according to the ratio
+ r_view.useperspective = true;
r_view.frustum_y = tan(scr_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
r_view.frustum_x = r_view.frustum_y * (float)r_view.width / (float)r_view.height / vid_pixelheight.value;
r_view.y = 0;
r_view.z = 0;
+ r_view.useperspective = true;
r_view.frustum_y = tan(scr_zoomwindow_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
r_view.frustum_x = r_view.frustum_y * vid_pixelheight.value * (float)r_view.width / (float)r_view.height;
r_view.x = 0;
r_view.y = 0;
r_view.z = 0;
+ r_view.useperspective = false;
}
// draw 2D stuff
SHOWLMP_drawall();
SCR_CheckDrawCenterString();
}
+ SCR_DrawNetGraph ();
MR_Draw();
CL_DrawVideo();
R_Shadow_EditLights_DrawSelectedLightProperties();
GL_Color(1,1,1,1);
GL_BlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
GL_DepthRange(0, 1);
+ GL_PolygonOffset(0, 0);
GL_DepthTest(false);
R_Mesh_VertexPointer(vertex3f, 0, 0);
R_Mesh_ColorPointer(NULL, 0, 0);