#include "quakedef.h"
#include "cl_dyntexture.h"
#include "cl_video.h"

// cvars
cvar_t cl_video_subtitles = {CVAR_SAVE, "cl_video_subtitles", "0", "show subtitles for videos (if they are present)"};
cvar_t cl_video_subtitles_lines = {CVAR_SAVE, "cl_video_subtitles_lines", "4", "how many lines to occupy for subtitles"};
cvar_t cl_video_subtitles_textsize = {CVAR_SAVE, "cl_video_subtitles_textsize", "16", "textsize for subtitles"};
cvar_t cl_video_scale = {CVAR_SAVE, "cl_video_scale", "1", "scale of video, 1 = fullscreen, 0.75 - 3/4 of screen etc."};
cvar_t cl_video_scale_vpos = {CVAR_SAVE, "cl_video_scale_vpos", "0", "vertical align of scaled video, -1 is top, 1 is bottom"};
cvar_t cl_video_stipple = {CVAR_SAVE, "cl_video_stipple", "0", "draw interlacing-like effect on videos, similar to scr_stipple but static and used only with video playing."};
cvar_t cl_video_brightness = {CVAR_SAVE, "cl_video_brightness", "1", "brightness of video, 1 = fullbright, 0.75 - 3/4 etc."};
cvar_t cl_video_keepaspectratio = {CVAR_SAVE, "cl_video_keepaspectratio", "0", "keeps aspect ratio of fullscreen videos, leaving black color on unfilled areas, a value of 2 lets the video be stretched horizontally with top & bottom being sliced out"};
cvar_t cl_video_fadein = {CVAR_SAVE, "cl_video_fadein", "0", "fading-from-black effect once video is started, in seconds"};
cvar_t cl_video_fadeout = {CVAR_SAVE, "cl_video_fadeout", "0", "fading-to-black effect once video is ended, in seconds"};

cvar_t v_glslgamma_video = {CVAR_SAVE, "v_glslgamma_video", "1", "applies GLSL gamma to played video, could be a fraction, requires r_glslgamma_2d 1."};

// DPV stream decoder
#include "dpvsimpledecode.h"

// VorteX: libavcodec implementation
#include "cl_video_libavw.c"

// JAM video decoder used by Blood Omnicide
#ifdef JAMVIDEO
#include "cl_video_jamdecode.c"
#endif

// constants (and semi-constants)
static int cl_videormask;
// Opens a decoder stream for video->filename, trying each available
// backend in order: DPV (native), JAM (if compiled in), then libavcodec.
// On success video->stream is set and true is returned; on total failure
// the last backend's error string is printed and false is returned.
static qboolean OpenStream( clvideo_t * video )
{
	const char *errorstring;

	// native DarkPlaces video format first
	video->stream = dpvsimpledecode_open( video, video->filename, &errorstring);
	if (video->stream)
		return true;

#ifdef JAMVIDEO
	// JAM decoder used by Blood Omnicide
	video->stream = jam_open( video, video->filename, &errorstring);
	if (video->stream)
		return true;
#endif

	// last resort: libavcodec wrapper
	video->stream = LibAvW_OpenVideo( video, video->filename, &errorstring);
	if (video->stream)
		return true;

	// NOTE: only the error from the final backend is reported
	Con_Printf("unable to open \"%s\", error: %s\n", video->filename, errorstring);
	return false;
}
-static void VideoUpdateCallback(rtexture_t *rt, void *data) {
+static void VideoUpdateCallback(rtexture_t *rt, void *data)
+{
clvideo_t *video = (clvideo_t *) data;
- R_UpdateTexture( video->cpif.tex, (unsigned char *)video->imagedata, 0, 0, video->cpif.width, video->cpif.height );
+ R_UpdateTexture( video->cpif.tex, (unsigned char *)video->imagedata, 0, 0, 0, video->cpif.width, video->cpif.height, 1 );
}
-static void LinkVideoTexture( clvideo_t *video ) {
- video->cpif.tex = R_LoadTexture2D( cl_videotexturepool, video->cpif.name,
- video->cpif.width, video->cpif.height, NULL, TEXTYPE_BGRA, TEXF_PERSISTENT | TEXF_ALLOWUPDATES, NULL );
+static void LinkVideoTexture( clvideo_t *video )
+{
+ video->cpif.tex = R_LoadTexture2D( cl_videotexturepool, video->cpif.name, video->cpif.width, video->cpif.height, NULL, TEXTYPE_BGRA, TEXF_PERSISTENT | TEXF_CLAMP, -1, NULL );
R_MakeTextureDynamic( video->cpif.tex, VideoUpdateCallback, video );
CL_LinkDynTexture( video->cpif.name, video->cpif.tex );
}
-static void UnlinkVideoTexture( clvideo_t *video ) {
+static void UnlinkVideoTexture( clvideo_t *video )
+{
CL_UnlinkDynTexture( video->cpif.name );
// free the texture
R_FreeTexture( video->cpif.tex );
+ video->cpif.tex = NULL;
// free the image data
Mem_Free( video->imagedata );
}
// Releases a video's texture/image memory while it is idle; a video in
// FIRSTFRAME state also has its decoder stream closed, since it can be
// reopened on demand (see WakeVideo/CL_RestartVideo).
static void SuspendVideo( clvideo_t * video )
{
	if (video->suspended)
		return;
	video->suspended = true;
	UnlinkVideoTexture(video);
	// if we are in firstframe mode, also close the stream
	if (video->state == CLVIDEO_FIRSTFRAME)
	{
		// video->close is the decoder-specific close callback
		if (video->stream)
			video->close(video->stream);
		video->stream = NULL;
	}
}
static qboolean WakeVideo( clvideo_t * video )
subtitle_text = NULL;
if (langcvar)
{
- dpsnprintf(overridename, sizeof(overridename), "script/locale/%s/%s", langcvar->string, subtitlesfile);
+ dpsnprintf(overridename, sizeof(overridename), "locale/%s/%s", langcvar->string, subtitlesfile);
subtitle_text = (char *)FS_LoadFile(overridename, cls.permanentmempool, false, NULL);
}
if (!subtitle_text)
video->state = CLVIDEO_FIRSTFRAME;
video->framenum = -1;
- video->framerate = dpvsimpledecode_getframerate( video->stream );
+ video->framerate = video->getframerate( video->stream );
video->lasttime = realtime;
video->subtitles = 0;
- video->cpif.width = dpvsimpledecode_getwidth( video->stream );
- video->cpif.height = dpvsimpledecode_getheight( video->stream );
+ video->cpif.width = video->getwidth( video->stream );
+ video->cpif.height = video->getheight( video->stream );
video->imagedata = Mem_Alloc( cls.permanentmempool, video->cpif.width * video->cpif.height * cl_videobytesperpixel );
LinkVideoTexture( video );
return NULL;
}
-void CL_SetVideoState( clvideo_t *video, clvideostate_t state )
+void CL_SetVideoState(clvideo_t *video, clvideostate_t state)
{
- if( !video )
+ if (!video)
return;
video->lasttime = realtime;
video->state = state;
- if( state == CLVIDEO_FIRSTFRAME )
- CL_RestartVideo( video );
+ if (state == CLVIDEO_FIRSTFRAME)
+ CL_RestartVideo(video);
}
-void CL_RestartVideo( clvideo_t *video )
+void CL_RestartVideo(clvideo_t *video)
{
- if( !video )
+ if (!video)
return;
+ // reset time
video->starttime = video->lasttime = realtime;
video->framenum = -1;
- dpvsimpledecode_close( video->stream );
- if( !OpenStream( video ) )
+ // reopen stream
+ if (video->stream)
+ video->close(video->stream);
+ video->stream = NULL;
+ if (!OpenStream(video))
video->state = CLVIDEO_UNUSED;
}
-void CL_CloseVideo( clvideo_t * video )
+// close video
+void CL_CloseVideo(clvideo_t * video)
{
int i;
- if( !video || video->state == CLVIDEO_UNUSED )
+ if (!video || video->state == CLVIDEO_UNUSED)
return;
- if( !video->suspended || video->state != CLVIDEO_FIRSTFRAME )
- dpvsimpledecode_close( video->stream );
- if( !video->suspended )
- UnlinkVideoTexture( video );
+ // close stream
+ if (!video->suspended || video->state != CLVIDEO_FIRSTFRAME)
+ {
+ if (video->stream)
+ video->close(video->stream);
+ video->stream = NULL;
+ }
+ // unlink texture
+ if (!video->suspended)
+ UnlinkVideoTexture(video);
+ // purge subtitles
if (video->subtitles)
{
for (i = 0; i < video->subtitles; i++)
Z_Free( video->subtitle_text[i] );
video->subtitles = 0;
}
-
video->state = CLVIDEO_UNUSED;
}
-static void VideoFrame( clvideo_t *video )
+// update all videos
+void CL_Video_Frame(void)
{
+ clvideo_t *video;
int destframe;
-
- if( video->state == CLVIDEO_FIRSTFRAME )
- destframe = 0;
- else
- destframe = (int)((realtime - video->starttime) * video->framerate);
- if( destframe < 0 )
- destframe = 0;
- if( video->framenum < destframe ) {
- do {
- video->framenum++;
- if( dpvsimpledecode_video( video->stream, video->imagedata, cl_videormask,
- cl_videogmask, cl_videobmask, cl_videobytesperpixel,
- cl_videobytesperpixel * video->cpif.width )
- ) { // finished?
- CL_RestartVideo( video );
- if( video->state == CLVIDEO_PLAY )
- video->state = CLVIDEO_FIRSTFRAME;
- return;
- }
- } while( video->framenum < destframe );
- R_MarkDirtyTexture( video->cpif.tex );
- }
-}
-
-void CL_Video_Frame( void ) // update all videos
-{
int i;
- clvideo_t *video;
if (!cl_num_videos)
return;
-
- for( video = cl_videos, i = 0 ; i < cl_num_videos ; video++, i++ )
- if( video->state != CLVIDEO_UNUSED && !video->suspended )
+ for (video = cl_videos, i = 0 ; i < cl_num_videos ; video++, i++)
+ {
+ if (video->state != CLVIDEO_UNUSED && !video->suspended)
{
- if( realtime - video->lasttime > CLTHRESHOLD )
- SuspendVideo( video );
- else if( video->state == CLVIDEO_PAUSE )
+ if (realtime - video->lasttime > CLTHRESHOLD)
+ {
+ SuspendVideo(video);
+ continue;
+ }
+ if (video->state == CLVIDEO_PAUSE)
+ {
video->starttime = realtime - video->framenum * video->framerate;
+ continue;
+ }
+ // read video frame from stream if time has come
+ if (video->state == CLVIDEO_FIRSTFRAME )
+ destframe = 0;
else
- VideoFrame( video );
+ destframe = (int)((realtime - video->starttime) * video->framerate);
+ if (destframe < 0)
+ destframe = 0;
+ if (video->framenum < destframe)
+ {
+ do {
+ video->framenum++;
+ if (video->decodeframe(video->stream, video->imagedata, cl_videormask, cl_videogmask, cl_videobmask, cl_videobytesperpixel, cl_videobytesperpixel * video->cpif.width))
+ {
+ // finished?
+ CL_RestartVideo(video);
+ if (video->state == CLVIDEO_PLAY)
+ video->state = CLVIDEO_FIRSTFRAME;
+ return;
+ }
+ } while(video->framenum < destframe);
+ R_MarkDirtyTexture(video->cpif.tex);
+ }
}
+ }
- if( cl_videos->state == CLVIDEO_FIRSTFRAME )
+ // stop main video
+ if (cl_videos->state == CLVIDEO_FIRSTFRAME)
CL_VideoStop();
// reduce range to exclude unnecessary entries
- while (cl_num_videos > 0 && cl_videos[cl_num_videos-1].state == CLVIDEO_UNUSED)
+ while(cl_num_videos > 0 && cl_videos[cl_num_videos-1].state == CLVIDEO_UNUSED)
cl_num_videos--;
}
-void CL_Video_Shutdown( void )
-{
- int i;
- for( i = 0 ; i < cl_num_videos ; i++ )
- CL_CloseVideo( &cl_videos[ i ] );
-}
-
void CL_PurgeOwner( int owner )
{
int i;
- for( i = 0 ; i < cl_num_videos ; i++ )
- if( cl_videos[ i ].ownertag == owner )
- CL_CloseVideo( &cl_videos[ i ] );
+
+ for (i = 0 ; i < cl_num_videos ; i++)
+ if (cl_videos[i].ownertag == owner)
+ CL_CloseVideo(&cl_videos[i]);
}
typedef struct
}
cl_video_subtitle_info_t;
-float CL_DrawVideo_WordWidthFunc(void *passthrough, const char *w, size_t *length, float maxWidth)
+static float CL_DrawVideo_WordWidthFunc(void *passthrough, const char *w, size_t *length, float maxWidth)
{
cl_video_subtitle_info_t *si = (cl_video_subtitle_info_t *) passthrough;
return 0;
}
-int CL_DrawVideo_DisplaySubtitleLine(void *passthrough, const char *line, size_t length, float width, qboolean isContinuation)
+static int CL_DrawVideo_DisplaySubtitleLine(void *passthrough, const char *line, size_t length, float width, qboolean isContinuation)
{
cl_video_subtitle_info_t *si = (cl_video_subtitle_info_t *) passthrough;
void CL_DrawVideo(void)
{
clvideo_t *video;
- float videotime;
+ float videotime, px, py, sx, sy, st[8], b;
cl_video_subtitle_info_t si;
int i;
if (cl_video_brightness.value <= 0 || cl_video_brightness.value > 10)
Cvar_SetValueQuick( &cl_video_brightness, 1);
+ // calc video proportions
+ px = 0;
+ py = 0;
+ sx = vid_conwidth.integer;
+ sy = vid_conheight.integer;
+ st[0] = 0.0; st[1] = 0.0;
+ st[2] = 1.0; st[3] = 0.0;
+ st[4] = 0.0; st[5] = 1.0;
+ st[6] = 1.0; st[7] = 1.0;
+ if (cl_video_keepaspectratio.integer)
+ {
+ float a = video->getaspectratio(video->stream) / ((float)vid.width / (float)vid.height);
+ if (cl_video_keepaspectratio.integer >= 2)
+ {
+ // clip instead of scale
+ if (a < 1.0) // clip horizontally
+ {
+ st[1] = st[3] = (1 - a)*0.5;
+ st[5] = st[7] = 1 - (1 - a)*0.5;
+ }
+ else if (a > 1.0) // clip vertically
+ {
+ st[0] = st[4] = (1 - 1/a)*0.5;
+ st[2] = st[6] = (1/a)*0.5;
+ }
+ }
+ else if (a < 1.0) // scale horizontally
+ {
+ px += sx * (1 - a) * 0.5;
+ sx *= a;
+ }
+ else if (a > 1.0) // scale vertically
+ {
+ a = 1 / a;
+ py += sy * (1 - a);
+ sy *= a;
+ }
+ }
+
+ if (cl_video_scale.value != 1)
+ {
+ px += sx * (1 - cl_video_scale.value) * 0.5;
+ py += sy * (1 - cl_video_scale.value) * ((bound(-1, cl_video_scale_vpos.value, 1) + 1) / 2);
+ sx *= cl_video_scale.value;
+ sy *= cl_video_scale.value;
+ }
+
+ // calc brightness for fadein and fadeout effects
+ b = cl_video_brightness.value;
+ if (cl_video_fadein.value && (realtime - video->starttime) < cl_video_fadein.value)
+ b = pow((realtime - video->starttime)/cl_video_fadein.value, 2);
+ else if (cl_video_fadeout.value && ((video->starttime + video->framenum * video->framerate) - realtime) < cl_video_fadeout.value)
+ b = pow(((video->starttime + video->framenum * video->framerate) - realtime)/cl_video_fadeout.value, 2);
+
// draw black bg in case stipple is active or video is scaled
- if (cl_video_stipple.integer || cl_video_scale.value != 1)
+ if (cl_video_stipple.integer || px != 0 || py != 0 || sx != vid_conwidth.integer || sy != vid_conheight.integer)
DrawQ_Fill(0, 0, vid_conwidth.integer, vid_conheight.integer, 0, 0, 0, 1, 0);
+#ifndef USE_GLES2
// enable video-only polygon stipple (of global stipple is not active)
if (qglPolygonStipple && !scr_stipple.integer && cl_video_stipple.integer)
{
GLubyte stipple[128];
- int i, s, width, parts;
+ int s, width, parts;
s = cl_video_stipple.integer;
parts = (s & 007);
}
qglPolygonStipple(stipple);CHECKGLERROR
}
+#endif
// draw video
- if (cl_video_scale.value == 1)
- DrawQ_Pic(0, 0, &video->cpif, vid_conwidth.integer, vid_conheight.integer, cl_video_brightness.value, cl_video_brightness.value, cl_video_brightness.value, 1, 0);
+ if (v_glslgamma_video.value >= 1)
+ DrawQ_SuperPic(px, py, &video->cpif, sx, sy, st[0], st[1], b, b, b, 1, st[2], st[3], b, b, b, 1, st[4], st[5], b, b, b, 1, st[6], st[7], b, b, b, 1, 0);
else
{
- int px = (int)(vid_conwidth.integer * (1 - cl_video_scale.value) * 0.5);
- int py = (int)(vid_conheight.integer * (1 - cl_video_scale.value) * ((bound(-1, cl_video_scale_vpos.value, 1) + 1) / 2));
- int sx = (int)(vid_conwidth.integer * cl_video_scale.value);
- int sy = (int)(vid_conheight.integer * cl_video_scale.value);
- DrawQ_Pic(px, py, &video->cpif, sx , sy, cl_video_brightness.value, cl_video_brightness.value, cl_video_brightness.value, 1, 0);
+ DrawQ_SuperPic(px, py, &video->cpif, sx, sy, st[0], st[1], b, b, b, 1, st[2], st[3], b, b, b, 1, st[4], st[5], b, b, b, 1, st[6], st[7], b, b, b, 1, DRAWFLAG_NOGAMMA);
+ if (v_glslgamma_video.value > 0.0)
+ DrawQ_SuperPic(px, py, &video->cpif, sx, sy, st[0], st[1], b, b, b, v_glslgamma_video.value, st[2], st[3], b, b, b, v_glslgamma_video.value, st[4], st[5], b, b, b, v_glslgamma_video.value, st[6], st[7], b, b, b, v_glslgamma_video.value, 0);
}
+#ifndef USE_GLES2
// disable video-only stipple
if (qglPolygonStipple && !scr_stipple.integer && cl_video_stipple.integer)
+ {
qglDisable(GL_POLYGON_STIPPLE);CHECKGLERROR
+ }
+#endif
// VorteX: draw subtitle_text
if (!video->subtitles || !cl_video_subtitles.integer)
void CL_VideoStart(char *filename, const char *subtitlesfile)
{
+ char vabuf[1024];
Host_StartVideo();
if( cl_videos->state != CLVIDEO_UNUSED )
CL_CloseVideo( cl_videos );
// already contains video/
- if( !OpenVideo( cl_videos, filename, va( CLDYNTEXTUREPREFIX "%s", filename ), 0, subtitlesfile ) )
+ if( !OpenVideo( cl_videos, filename, va(vabuf, sizeof(vabuf), CLDYNTEXTUREPREFIX "%s", filename ), 0, subtitlesfile ) )
return;
// expand the active range to include the new entry
cl_num_videos = max(cl_num_videos, 1);
static void CL_PlayVideo_f(void)
{
char name[MAX_QPATH], subtitlesfile[MAX_QPATH];
+ const char *extension;
Host_StartVideo();
+ if (COM_CheckParm("-benchmark"))
+ return;
+
if (Cmd_Argc() < 2)
{
Con_Print("usage: playvideo <videoname> [custom_subtitles_file]\nplays video named video/<videoname>.dpv\nif custom subtitles file is not presented\nit tries video/<videoname>.sub");
return;
}
- dpsnprintf(name, sizeof(name), "video/%s.dpv", Cmd_Argv(1));
+ extension = FS_FileExtension(Cmd_Argv(1));
+ if (extension[0])
+ dpsnprintf(name, sizeof(name), "video/%s", Cmd_Argv(1));
+ else
+ dpsnprintf(name, sizeof(name), "video/%s.dpv", Cmd_Argv(1));
if ( Cmd_Argc() > 2)
CL_VideoStart(name, Cmd_Argv(2));
else
static void cl_video_shutdown( void )
{
- // TODO: unlink video textures?
+ int i;
+ clvideo_t *video;
+
+ for( video = cl_videos, i = 0 ; i < cl_num_videos ; i++, video++ )
+ if( video->state != CLVIDEO_UNUSED && !video->suspended )
+ SuspendVideo( video );
R_FreeTexturePool( &cl_videotexturepool );
}
Cvar_RegisterVariable(&cl_video_scale_vpos);
Cvar_RegisterVariable(&cl_video_brightness);
Cvar_RegisterVariable(&cl_video_stipple);
+ Cvar_RegisterVariable(&cl_video_keepaspectratio);
+ Cvar_RegisterVariable(&cl_video_fadein);
+ Cvar_RegisterVariable(&cl_video_fadeout);
+
+ Cvar_RegisterVariable(&v_glslgamma_video);
- R_RegisterModule( "CL_Video", cl_video_start, cl_video_shutdown, cl_video_newmap );
+ R_RegisterModule( "CL_Video", cl_video_start, cl_video_shutdown, cl_video_newmap, NULL, NULL );
+
+ LibAvW_OpenLibrary();
}
+void CL_Video_Shutdown( void )
+{
+ int i;
+
+ for (i = 0 ; i < cl_num_videos ; i++)
+ CL_CloseVideo(&cl_videos[ i ]);
+
+ LibAvW_CloseLibrary();
+}