diff --git a/generic-pc/video.cpp b/generic-pc/video.cpp
index e541ac7..341f6e9 100644
--- a/generic-pc/video.cpp
+++ b/generic-pc/video.cpp
@@ -44,6 +44,7 @@ extern "C" {
 #include "lt_debug.h"
 #define lt_debug(args...) _lt_debug(TRIPLE_DEBUG_VIDEO, this, args)
 #define lt_info(args...) _lt_info(TRIPLE_DEBUG_VIDEO, this, args)
+#define lt_info_c(args...) _lt_info(TRIPLE_DEBUG_VIDEO, NULL, args)
 
 cVideo *videoDecoder = NULL;
 extern cDemux *videoDemux;
@@ -540,3 +541,107 @@ void cVideo::run(void)
 		buf_out = 0;
 	lt_info("======================== end decoder thread ================================\n");
 }
+
+static bool swscale(unsigned char *src, unsigned char *dst, int sw, int sh, int dw, int dh)
+{
+	bool ret = false;
+	struct SwsContext *scale = NULL;
+	AVFrame *sframe, *dframe;
+	scale = sws_getCachedContext(scale, sw, sh, PIX_FMT_RGB32, dw, dh, PIX_FMT_RGB32, SWS_BICUBIC, 0, 0, 0);
+	if (!scale) {
+		lt_info_c("%s: ERROR setting up SWS context\n", __func__);
+		return false;
+	}
+	sframe = avcodec_alloc_frame();
+	dframe = avcodec_alloc_frame();
+	if (!sframe || !dframe) {
+		lt_info_c("%s: could not alloc sframe (%p) or dframe (%p)\n", __func__, sframe, dframe);
+		goto out;
+	}
+	avpicture_fill((AVPicture *)sframe, &(src[0]), PIX_FMT_RGB32, sw, sh);
+	avpicture_fill((AVPicture *)dframe, &(dst[0]), PIX_FMT_RGB32, dw, dh);
+	sws_scale(scale, sframe->data, sframe->linesize, 0, sh, dframe->data, dframe->linesize);
+ out:
+	avcodec_free_frame(&sframe);
+	avcodec_free_frame(&dframe);
+	sws_freeContext(scale);
+	return ret;
+}
+
+bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video, bool get_osd, bool scale_to_video)
+{
+	lt_info("%s: data 0x%p xres %d yres %d vid %d osd %d scale %d\n",
+		__func__, data, xres, yres, get_video, get_osd, scale_to_video);
+	SWFramebuffer video;
+	std::vector<unsigned char> *osd = NULL;
+	std::vector<unsigned char> s_osd; /* scaled OSD */
+	int vid_w = 0, vid_h = 0;
+	int osd_w = glfb->getOSDWidth();
+	int osd_h = glfb->getOSDHeight();
+	xres = osd_w;
+	yres = osd_h;
+	if (get_video) {
+		buf_m.lock();
+		video = buffers[buf_out];
+		buf_m.unlock();
+		vid_w = video.width();
+		vid_h = video.height();
+		if (scale_to_video || !get_osd) {
+			xres = vid_w;
+			yres = vid_h;
+			AVRational a = video.AR();
+			/* TODO: this does not consider display_aspect and display_crop */
+			if (a.num > 0 && a.den > 0)
+				xres = vid_w * a.num / a.den;
+		}
+	}
+	if (get_osd)
+		osd = glfb->getOSDBuffer();
+	unsigned int need = avpicture_get_size(PIX_FMT_RGB32, xres, yres);
+	data = (unsigned char *)realloc(data, need); /* will be freed by caller */
+	if (data == NULL)	/* out of memory? */
+		return false;
+
+	if (get_video) {
+		if (vid_w != xres || vid_h != yres)	/* scale video into data... */
+			swscale(&video[0], data, vid_w, vid_h, xres, yres);
+		else	/* get_video and no fancy scaling needed */
+			memcpy(data, &video[0], xres * yres * sizeof(uint32_t));
+	}
+
+	if (get_osd && (osd_w != xres || osd_h != yres)) {
+		/* rescale osd */
+		s_osd.resize(need);
+		swscale(&(*osd)[0], &s_osd[0], osd_w, osd_h, xres, yres);
+		osd = &s_osd;
+	}
+
+	if (get_video && get_osd) {
+		/* alpha blend osd onto data (video). TODO: maybe libavcodec can do this? */
+		uint32_t *d = (uint32_t *)data;
+		uint32_t *pixpos = (uint32_t *)&(*osd)[0];
+		for (int count = 0; count < yres; count++) {
+			for (int count2 = 0; count2 < xres; count2++) {
+				uint32_t pix = *pixpos;
+				if ((pix & 0xff000000) == 0xff000000)
+					*d = pix;
+				else {
+					uint8_t *in = (uint8_t *)(pixpos);
+					uint8_t *out = (uint8_t *)d;
+					int a = in[3];	/* TODO: big/little endian? */
+					*out = (*out + ((*in - *out) * a) / 256);
+					in++; out++;
+					*out = (*out + ((*in - *out) * a) / 256);
+					in++; out++;
+					*out = (*out + ((*in - *out) * a) / 256);
+				}
+				d++;
+				pixpos++;
+			}
+		}
+	}
+	else if (get_osd) /* only get_osd, data is not yet populated */
+		memcpy(data, &(*osd)[0], xres * yres * sizeof(uint32_t));
+
+	return true;
+}
diff --git a/generic-pc/video_lib.h b/generic-pc/video_lib.h
index 493b8a4..55a0d8d 100644
--- a/generic-pc/video_lib.h
+++ b/generic-pc/video_lib.h
@@ -190,6 +190,7 @@ class cVideo : public OpenThreads::Thread
 	int CloseVBI(void) { return 0; };
 	int StartVBI(unsigned short) { return 0; };
 	int StopVBI(void) { return 0; };
+	bool GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video = true, bool get_osd = false, bool scale_to_video = false);
 	SWFramebuffer *getDecBuf(void);
 	private:
 	void run();
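
A minimal caller-side sketch of how the new cVideo::GetScreenImage() entry point might be used, assuming the global videoDecoder defined in generic-pc/video.cpp has been initialized; save_screenshot() and the commented-out write_png() call are hypothetical names used only for illustration and are not part of the patch.

/* Hypothetical usage sketch, not part of the patch above: fetch the
 * (optionally blended) video + OSD frame as an RGB32 buffer and hand it
 * to whatever encoder the caller uses. */
#include <cstdlib>
#include "video_lib.h"

extern cVideo *videoDecoder;	/* defined in generic-pc/video.cpp */

static bool save_screenshot(bool with_video, bool with_osd)
{
	unsigned char *data = NULL;	/* GetScreenImage() realloc()s this buffer */
	int xres = 0, yres = 0;
	if (!videoDecoder->GetScreenImage(data, xres, yres, with_video, with_osd, true))
		return false;
	/* data now holds xres * yres RGB32 pixels, e.g.
	 * write_png("/tmp/screenshot.png", data, xres, yres); */
	free(data);	/* "will be freed by caller", per the comment in the patch */
	return true;
}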