- formatting code using astyle

Conflicts:
	libarmbox/dmx.cpp
	libgeneric-pc/video_lib.h
	libspark/dmx.cpp

Signed-off-by: Thilo Graf <dbt@novatux.de>
Author: svenhoefer
Date: 2021-05-17 23:47:39 +02:00
Committed by: Thilo Graf
Parent: 42264ba4af
Commit: 7e5b1fc5d2
161 changed files with 13043 additions and 11396 deletions
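
The commit does not record which astyle options were used. Judging from the style visible in the diffs below (braces moved to their own line in Allman style, operators padded with spaces, pointers and references aligned to the variable name, indented switch cases, unpadded parentheses), a plausible, assumed options file would be:

	--style=allman
	--indent=force-tab
	--indent-switches
	--pad-oper
	--pad-comma
	--unpad-paren
	--align-pointer=name
	--align-reference=name

Indentation is not preserved in this view, so --indent=force-tab is a guess. With such a file saved as .astylerc, a run like astyle --options=.astylerc --suffix=none --recursive "*.cpp" "*.h" over the tree would reproduce most of the whitespace-only changes shown here.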


@@ -43,7 +43,7 @@ extern "C" {
/* my own buf 16k */
#define DMX_BUF_SZ 0x4000
cAudio * audioDecoder = NULL;
cAudio *audioDecoder = NULL;
extern cDemux *audioDemux;
static uint8_t *dmxbuf = NULL;
static int bufpos;
@@ -163,9 +163,9 @@ int cAudio::PrepareClipPlay(int ch, int srate, int bits, int le)
adevice = ao_open_live(driver, &sformat, NULL);
ao_info *ai = ao_driver_info(driver);
hal_info("%s: changed params ch %d srate %d bits %d le %d adevice %p\n",
__func__, ch, srate, bits, le, adevice);;
__func__, ch, srate, bits, le, adevice);;
hal_info("libao driver: %d name '%s' short '%s' author '%s'\n",
driver, ai->name, ai->short_name, ai->author);
driver, ai->name, ai->short_name, ai->author);
}
return 0;
};
@@ -173,7 +173,8 @@ int cAudio::PrepareClipPlay(int ch, int srate, int bits, int le)
int cAudio::WriteClip(unsigned char *buffer, int size)
{
hal_debug("cAudio::%s buf 0x%p size %d\n", __func__, buffer, size);
if (!adevice) {
if (!adevice)
{
hal_info("%s: adevice not opened?\n", __func__);
return 0;
}
@@ -187,7 +188,8 @@ int cAudio::StopClip()
#if 0
/* don't do anything - closing / reopening ao all the time makes for long delays
* reinit on-demand (e.g. for changed parameters) instead */
if (!adevice) {
if (!adevice)
{
hal_info("%s: adevice not opened?\n", __func__);
return 0;
}
@@ -200,13 +202,15 @@ int cAudio::StopClip()
void cAudio::getAudioInfo(int &type, int &layer, int &freq, int &bitrate, int &mode)
{
type = 0;
layer = 0; /* not used */
layer = 0; /* not used */
freq = 0;
bitrate = 0; /* not used, but easy to get :-) */
mode = 0; /* default: stereo */
bitrate = 0; /* not used, but easy to get :-) */
mode = 0; /* default: stereo */
printf("cAudio::getAudioInfo c %p\n", c);
if (c) {
switch (c->codec_id) {
if (c)
{
switch (c->codec_id)
{
case AV_CODEC_ID_MP2:
type = AUDIO_FMT_MPEG;
break;
@@ -236,40 +240,42 @@ void cAudio::getAudioInfo(int &type, int &layer, int &freq, int &bitrate, int &m
bitrate = c->bit_rate;
if (c->channels == 1)
mode = 3; /* for AV_CODEC_ID_MP2, only stereo / mono is detected for now */
if (c->codec_id != AV_CODEC_ID_MP2) {
switch (c->channel_layout) {
if (c->codec_id != AV_CODEC_ID_MP2)
{
switch (c->channel_layout)
{
case AV_CH_LAYOUT_MONO:
mode = 1; // "C"
mode = 1; // "C"
break;
case AV_CH_LAYOUT_STEREO:
mode = 2; // "L/R"
mode = 2; // "L/R"
break;
case AV_CH_LAYOUT_2_1:
case AV_CH_LAYOUT_SURROUND:
mode = 3; // "L/C/R"
mode = 3; // "L/C/R"
break;
case AV_CH_LAYOUT_2POINT1:
mode = 4; // "L/R/S"
mode = 4; // "L/R/S"
break;
case AV_CH_LAYOUT_3POINT1:
mode = 5; // "L/C/R/S"
mode = 5; // "L/C/R/S"
break;
case AV_CH_LAYOUT_2_2:
case AV_CH_LAYOUT_QUAD:
mode = 6; // "L/R/SL/SR"
mode = 6; // "L/R/SL/SR"
break;
case AV_CH_LAYOUT_5POINT0:
case AV_CH_LAYOUT_5POINT1:
mode = 7; // "L/C/R/SL/SR"
mode = 7; // "L/C/R/SL/SR"
break;
default:
hal_info("%s: unknown ch_layout 0x%" PRIx64 "\n",
__func__, c->channel_layout);
__func__, c->channel_layout);
}
}
}
hal_debug("%s t: %d l: %d f: %d b: %d m: %d codec_id: %x\n",
__func__, type, layer, freq, bitrate, mode, c?c->codec_id:-1);
__func__, type, layer, freq, bitrate, mode, c ? c->codec_id : -1);
};
void cAudio::SetSRS(int /*iq_enable*/, int /*nmgr_enable*/, int /*iq_mode*/, int /*iq_level*/)
@@ -310,8 +316,10 @@ static int _my_read(void *, uint8_t *buf, int buf_size)
int cAudio::my_read(uint8_t *buf, int buf_size)
{
int tmp = 0;
if (audioDecoder && bufpos < DMX_BUF_SZ - 4096) {
while (bufpos < buf_size && ++tmp < 20) { /* retry max 20 times */
if (audioDecoder && bufpos < DMX_BUF_SZ - 4096)
{
while (bufpos < buf_size && ++tmp < 20) /* retry max 20 times */
{
int ret = audioDemux->Read(dmxbuf + bufpos, DMX_BUF_SZ - bufpos, 10);
if (ret > 0)
bufpos += ret;
@@ -322,7 +330,8 @@ int cAudio::my_read(uint8_t *buf, int buf_size)
if (bufpos == 0)
return 0;
//hal_info("%s buf_size %d bufpos %d th %d tmp %d\n", __func__, buf_size, bufpos, thread_started, tmp);
if (bufpos > buf_size) {
if (bufpos > buf_size)
{
memcpy(buf, dmxbuf, buf_size);
memmove(dmxbuf, dmxbuf + buf_size, bufpos - buf_size);
bufpos -= buf_size;
@@ -367,18 +376,19 @@ void cAudio::run()
av_init_packet(&avpkt);
inp = av_find_input_format("mpegts");
AVIOContext *pIOCtx = avio_alloc_context(inbuf, INBUF_SIZE, // internal Buffer and its size
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
_my_read, // read callback
NULL, // write callback
NULL); // seek callback
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
_my_read, // read callback
NULL, // write callback
NULL); // seek callback
avfc = avformat_alloc_context();
avfc->pb = pIOCtx;
avfc->iformat = inp;
avfc->probesize = 188*5;
avfc->probesize = 188 * 5;
thread_started = true;
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0) {
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0)
{
hal_info("%s: avformat_open_input() failed.\n", __func__);
goto out;
}
@@ -394,31 +404,35 @@ void cAudio::run()
hal_info("%s: stream 0 no audio codec? 0x%x\n", __func__, p->codec_type);
codec = avcodec_find_decoder(p->codec_id);
if (!codec) {
if (!codec)
{
hal_info("%s: Codec for %s not found\n", __func__, avcodec_get_name(p->codec_id));
goto out;
}
if (c)
av_free(c);
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
if (avcodec_open2(c, codec, NULL) < 0)
{
hal_info("%s: avcodec_open2() failed\n", __func__);
goto out;
}
if(p->sample_rate == 0 || p->channels == 0){
if (p->sample_rate == 0 || p->channels == 0)
{
av_get_sample_fmt_string(tmp, sizeof(tmp), c->sample_fmt);
hal_info("Header missing %s, sample_fmt %d (%s) sample_rate %d channels %d\n",avcodec_get_name(p->codec_id), c->sample_fmt, tmp, p->sample_rate, p->channels);
hal_info("Header missing %s, sample_fmt %d (%s) sample_rate %d channels %d\n", avcodec_get_name(p->codec_id), c->sample_fmt, tmp, p->sample_rate, p->channels);
goto out2;
}
frame = av_frame_alloc();
if (!frame) {
if (!frame)
{
hal_info("%s: av_frame_alloc failed\n", __func__);
goto out2;
}
/* output sample rate, channels, layout could be set here if necessary */
o_ch = p->channels; /* 2 */
o_sr = p->sample_rate; /* 48000 */
o_layout = p->channel_layout; /* AV_CH_LAYOUT_STEREO */
o_ch = p->channels; /* 2 */
o_sr = p->sample_rate; /* 48000 */
o_layout = p->channel_layout; /* AV_CH_LAYOUT_STEREO */
if (sformat.channels != o_ch || sformat.rate != o_sr ||
sformat.byte_format != AO_FMT_NATIVE || sformat.bits != 16 || adevice == NULL)
{
@@ -433,10 +447,10 @@ void cAudio::run()
adevice = ao_open_live(driver, &sformat, NULL);
ai = ao_driver_info(driver);
hal_info("%s: changed params ch %d srate %d bits %d adevice %p\n",
__func__, o_ch, o_sr, 16, adevice);
if(ai)
__func__, o_ch, o_sr, 16, adevice);
if (ai)
hal_info("libao driver: %d name '%s' short '%s' author '%s'\n",
driver, ai->name, ai->short_name, ai->author);
driver, ai->name, ai->short_name, ai->author);
}
#if 0
hal_info(" driver options:");
@@ -446,17 +460,19 @@ void cAudio::run()
#endif
av_get_sample_fmt_string(tmp, sizeof(tmp), c->sample_fmt);
hal_info("decoding %s, sample_fmt %d (%s) sample_rate %d channels %d\n",
avcodec_get_name(p->codec_id), c->sample_fmt, tmp, p->sample_rate, p->channels);
avcodec_get_name(p->codec_id), c->sample_fmt, tmp, p->sample_rate, p->channels);
swr = swr_alloc_set_opts(swr,
o_layout, AV_SAMPLE_FMT_S16, o_sr, /* output */
p->channel_layout, c->sample_fmt, p->sample_rate, /* input */
0, NULL);
if (! swr) {
o_layout, AV_SAMPLE_FMT_S16, o_sr, /* output */
p->channel_layout, c->sample_fmt, p->sample_rate, /* input */
0, NULL);
if (! swr)
{
hal_info("could not alloc resample context\n");
goto out3;
}
swr_init(swr);
while (thread_started) {
while (thread_started)
{
int gotframe = 0;
if (av_read_frame(avfc, &avpkt) < 0)
break;
@@ -464,26 +480,35 @@ void cAudio::run()
avcodec_decode_audio4(c, frame, &gotframe, &avpkt);
#else
av_ret = avcodec_send_packet(c, &avpkt);
if (av_ret != 0 && av_ret != AVERROR(EAGAIN)) {
if (av_ret != 0 && av_ret != AVERROR(EAGAIN))
{
hal_info("%s: avcodec_send_packet %d\n", __func__, av_ret);
}else {
}
else
{
av_ret = avcodec_receive_frame(c, frame);
if (av_ret != 0 && av_ret != AVERROR(EAGAIN)) {
if (av_ret != 0 && av_ret != AVERROR(EAGAIN))
{
hal_info("%s: avcodec_send_packet %d\n", __func__, av_ret);
}else {
}
else
{
gotframe = 1;
}
}
#endif
if (gotframe && thread_started) {
if (gotframe && thread_started)
{
int out_linesize;
obuf_sz = av_rescale_rnd(swr_get_delay(swr, p->sample_rate) + frame->nb_samples, o_sr, p->sample_rate, AV_ROUND_UP);
if (obuf_sz > obuf_sz_max) {
if (obuf_sz > obuf_sz_max)
{
hal_info("obuf_sz: %d old: %d\n", obuf_sz, obuf_sz_max);
av_free(obuf);
if (av_samples_alloc(&obuf, &out_linesize, o_ch,
frame->nb_samples, AV_SAMPLE_FMT_S16, 1) < 0) {
frame->nb_samples, AV_SAMPLE_FMT_S16, 1) < 0)
{
hal_info("av_samples_alloc failed\n");
av_packet_unref(&avpkt);
break; /* while (thread_started) */
@@ -491,13 +516,13 @@ void cAudio::run()
obuf_sz_max = obuf_sz;
}
obuf_sz = swr_convert(swr, &obuf, obuf_sz,
(const uint8_t **)frame->extended_data, frame->nb_samples);
(const uint8_t **)frame->extended_data, frame->nb_samples);
#if (LIBAVUTIL_VERSION_MAJOR < 54)
curr_pts = av_frame_get_best_effort_timestamp(frame);
#else
curr_pts = frame->best_effort_timestamp;
#endif
hal_debug("%s: pts 0x%" PRIx64 " %3f\n", __func__, curr_pts, curr_pts/90000.0);
hal_debug("%s: pts 0x%" PRIx64 " %3f\n", __func__, curr_pts, curr_pts / 90000.0);
int o_buf_sz = av_samples_get_buffer_size(&out_linesize, o_ch, obuf_sz, AV_SAMPLE_FMT_S16, 1);
if (o_buf_sz > 0)
ao_play(adevice, (char *)obuf, o_buf_sz);
@@ -507,13 +532,13 @@ void cAudio::run()
// ao_close(adevice); /* can take long :-( */
av_free(obuf);
swr_free(&swr);
out3:
out3:
av_frame_free(&frame);
out2:
out2:
avcodec_close(c);
av_free(c);
c = NULL;
out:
out:
avformat_close_input(&avfc);
av_free(pIOCtx->buffer);
av_free(pIOCtx);


@@ -14,7 +14,8 @@ typedef enum
AUDIO_SYNC_AUDIO_MASTER
} AUDIO_SYNC_MODE;
typedef enum {
typedef enum
{
HDMI_ENCODED_OFF,
HDMI_ENCODED_AUTO,
HDMI_ENCODED_FORCED
@@ -40,7 +41,7 @@ typedef enum
class cAudio : public OpenThreads::Thread
{
friend class cPlayback;
friend class cPlayback;
private:
int fd;
bool Muted;
@@ -49,7 +50,7 @@ class cAudio : public OpenThreads::Thread
int mixer_fd; /* if we are using the OSS mixer */
int mixer_num; /* oss mixer to use, if any */
int StreamType;
int StreamType;
AUDIO_SYNC_MODE SyncMode;
bool started;
bool thread_started;
@@ -68,32 +69,53 @@ class cAudio : public OpenThreads::Thread
/* construct & destruct */
cAudio(void *, void *, void *);
~cAudio(void);
int64_t getPts() { return curr_pts; }
int64_t getPts()
{
return curr_pts;
}
void *GetHandle() { return NULL; };
void *GetHandle()
{
return NULL;
};
/* shut up */
int mute(bool remember = true) { return do_mute(true, remember); };
int unmute(bool remember = true) { return do_mute(false, remember); };
int mute(bool remember = true)
{
return do_mute(true, remember);
};
int unmute(bool remember = true)
{
return do_mute(false, remember);
};
/* volume, min = 0, max = 255 */
int setVolume(unsigned int left, unsigned int right);
int getVolume(void) { return volume;}
bool getMuteStatus(void) { return Muted; };
int getVolume(void)
{
return volume;
}
bool getMuteStatus(void)
{
return Muted;
};
/* start and stop audio */
int Start(void);
int Stop(void);
bool Pause(bool Pcm = true);
void SetStreamType(int bypass);
int GetStreamType(void) { return StreamType; }
int GetStreamType(void)
{
return StreamType;
}
void SetSyncMode(AVSYNC_TYPE Mode);
/* select channels */
int setChannel(int channel);
int PrepareClipPlay(int uNoOfChannels, int uSampleRate, int uBitsPerSample, int bLittleEndian);
int WriteClip(unsigned char * buffer, int size);
int WriteClip(unsigned char *buffer, int size);
int StopClip();
void getAudioInfo(int &type, int &layer, int& freq, int &bitrate, int &mode);
void getAudioInfo(int &type, int &layer, int &freq, int &bitrate, int &mode);
void SetSRS(int iq_enable, int nmgr_enable, int iq_mode, int iq_level);
bool IsHdmiDDSupported();
void SetHdmiDD(bool enable);


@@ -1,25 +1,25 @@
/*
Framebuffer implementation using clutter https://developer.gnome.org/clutter/
Copyright (C) 2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
Framebuffer implementation using clutter https://developer.gnome.org/clutter/
Copyright (C) 2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
based on the openGL framebuffer implementation
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
based on the openGL framebuffer implementation
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
TODO: AV-Sync code is "experimental" at best
TODO: AV-Sync code is "experimental" at best
*/
#include "config.h"
@@ -92,8 +92,8 @@ GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mS
*mX = x;
*mY = y;
av_reduce(&mOA.num, &mOA.den, x, y, INT_MAX);
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVAchanged = true;
mCrop = DISPLAY_AR_MODE_PANSCAN;
zoom = 1.0;
@@ -121,7 +121,7 @@ GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mS
unlink("/tmp/neutrino.input");
mkfifo("/tmp/neutrino.input", 0600);
input_fd = open("/tmp/neutrino.input", O_RDWR|O_CLOEXEC|O_NONBLOCK);
input_fd = open("/tmp/neutrino.input", O_RDWR | O_CLOEXEC | O_NONBLOCK);
if (input_fd < 0)
hal_info("%s: could not open /tmp/neutrino.input FIFO: %m\n", __func__);
initKeys();
@@ -214,7 +214,8 @@ void GLFramebuffer::run()
argv[0] = a;
argv[1] = NULL;
hal_info("GLFB: GL thread starting x %d y %d\n", x, y);
if (clutter_init(&argc, &argv) != CLUTTER_INIT_SUCCESS) {
if (clutter_init(&argc, &argv) != CLUTTER_INIT_SUCCESS)
{
hal_info("GLFB: error initializing clutter\n");
free(argv);
return;
@@ -228,7 +229,7 @@ void GLFramebuffer::run()
//g_signal_connect(stage, "destroy", G_CALLBACK(clutter_main_quit), NULL);
g_signal_connect(stage, "key-press-event", G_CALLBACK(GLFbPC::keyboardcb), (void *)1);
g_signal_connect(stage, "key-release-event", G_CALLBACK(GLFbPC::keyboardcb), NULL);
clutter_stage_set_user_resizable(CLUTTER_STAGE (stage), TRUE);
clutter_stage_set_user_resizable(CLUTTER_STAGE(stage), TRUE);
clutter_actor_grab_key_focus(stage);
clutter_actor_show(stage);
@@ -241,7 +242,8 @@ void GLFramebuffer::run()
vid_actor = clutter_actor_new();
ClutterContent *fb = clutter_image_new();
/* osd_buf, because it starts up black */
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGR_888, x, y, x*3, NULL)) {
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGR_888, x, y, x * 3, NULL))
{
hal_info("GLFB::%s clutter_image_set_data failed? (vid)\n", __func__);
_exit(1); /* life is hard */
}
@@ -260,7 +262,8 @@ void GLFramebuffer::run()
fb_actor = clutter_actor_new();
fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x*4, NULL)) {
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x * 4, NULL))
{
hal_info("GLFB::%s clutter_image_set_data failed? (osd)\n", __func__);
_exit(1); /* life is hard */
}
@@ -293,14 +296,14 @@ void GLFramebuffer::run()
/* static */ bool GLFbPC::keyboardcb(ClutterActor * /*actor*/, ClutterEvent *event, gpointer user_data)
{
guint key = clutter_event_get_key_symbol (event);
guint key = clutter_event_get_key_symbol(event);
int keystate = user_data ? 1 : 0;
hal_info_c("GLFB::%s: 0x%x, %d\n", __func__, key, keystate);
struct input_event ev;
if (key == 'f' && keystate)
{
hal_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen?"off":"on");
hal_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen ? "off" : "on");
glfb_priv->mFullscreen = !(glfb_priv->mFullscreen);
glfb_priv->mReInit = true;
return true;
@@ -314,7 +317,7 @@ void GLFramebuffer::run()
gettimeofday(&ev.time, NULL);
hal_debug_c("GLFB::%s: pushing 0x%x\n", __func__, ev.code);
ssize_t w = write(glfb_priv->input_fd, &ev, sizeof(ev));
if(w < 0)
if (w < 0)
return false;
return true;
}
@@ -323,7 +326,7 @@ int sleep_us = 30000;
void GLFbPC::render()
{
if(mShutDown)
if (mShutDown)
clutter_main_quit();
mReInitLock.lock();
@@ -338,22 +341,26 @@ void GLFbPC::render()
mY = &_mY[mFullscreen];
#endif
*mX = *mY * mOA.num / mOA.den;
if (mFullscreen) {
if (mFullscreen)
{
clutter_stage_set_fullscreen(CLUTTER_STAGE(stage), TRUE);
clutter_actor_show(stage);
clutter_stage_ensure_redraw(CLUTTER_STAGE(stage));
} else {
}
else
{
clutter_stage_set_fullscreen(CLUTTER_STAGE(stage), FALSE);
// *mX = *mY * mOA.num / mOA.den;
clutter_actor_set_size(stage, *mX, *mY);
}
hal_info("%s: reinit mX:%d mY:%d xoff:%d yoff:%d fs %d\n",
__func__, *mX, *mY, xoff, yoff, mFullscreen);
__func__, *mX, *mY, xoff, yoff, mFullscreen);
}
mReInitLock.unlock();
bltDisplayBuffer(); /* decoded video stream */
if (mState.blit) {
if (mState.blit)
{
/* only blit manually after fb->blit(), this helps to find missed blit() calls */
mState.blit = false;
hal_debug("GLFB::%s blit!\n", __func__);
@@ -368,15 +375,17 @@ void GLFbPC::render()
//xscale = 1.0;
int cmp = av_cmp_q(mVA, mOA);
const AVRational a149 = { 14, 9 };
switch (cmp) {
switch (cmp)
{
default:
case INT_MIN: /* invalid */
case 0: /* identical */
case INT_MIN: /* invalid */
case 0: /* identical */
hal_debug("%s: mVA == mOA (or fullscreen mode :-)\n", __func__);
break;
case 1: /* mVA > mOA -- video is wider than display */
case 1: /* mVA > mOA -- video is wider than display */
hal_debug("%s: mVA > mOA\n", __func__);
switch (mCrop) {
switch (mCrop)
{
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mVA) / av_q2d(mOA);
break;
@@ -393,18 +402,20 @@ void GLFbPC::render()
break;
}
break;
case -1: /* mVA < mOA -- video is taller than display */
case -1: /* mVA < mOA -- video is taller than display */
hal_debug("%s: mVA < mOA\n", __func__);
switch (mCrop) {
switch (mCrop)
{
case DISPLAY_AR_MODE_LETTERBOX:
break;
case DISPLAY_AR_MODE_PANSCAN2:
if (av_cmp_q(a149, mOA) < 0) {
if (av_cmp_q(a149, mOA) < 0)
{
zoom = av_q2d(mVA) * av_q2d(a149) / av_q2d(mOA);
break;
}
// fall through
/* fallthrough for output format 14:9 */
// fall through
/* fallthrough for output format 14:9 */
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mOA) / av_q2d(mVA);
break;
@@ -416,11 +427,11 @@ void GLFbPC::render()
}
break;
}
hal_debug("zoom: %f xscale: %f xzoom: %f\n", zoom, xscale,xzoom);
clutter_actor_set_scale(vid_actor, xscale*zoom*xzoom, zoom);
hal_debug("zoom: %f xscale: %f xzoom: %f\n", zoom, xscale, xzoom);
clutter_actor_set_scale(vid_actor, xscale * zoom * xzoom, zoom);
}
clutter_timeline_stop(tl);
clutter_timeline_set_delay(tl, sleep_us/1000);
clutter_timeline_set_delay(tl, sleep_us / 1000);
clutter_timeline_start(tl);
}
@@ -430,7 +441,8 @@ void GLFbPC::bltOSDBuffer()
int x = glfb_priv->mState.width;
int y = glfb_priv->mState.height;
ClutterContent *fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf->data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x*4, NULL)) {
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf->data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x * 4, NULL))
{
hal_info("GLFB::%s clutter_image_set_data failed?\n", __func__);
_exit(1); /* life is hard */
}
@@ -446,7 +458,8 @@ void GLFbPC::bltDisplayBuffer()
return;
static bool warn = true;
cVideo::SWFramebuffer *buf = videoDecoder->getDecBuf();
if (!buf) {
if (!buf)
{
if (warn)
hal_info("GLFB::%s did not get a buffer...\n", __func__);
warn = false;
@@ -458,18 +471,20 @@ void GLFbPC::bltDisplayBuffer()
return;
AVRational a = buf->AR();
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA)) {
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA))
{
_mVA = a;
/* _mVA is the raw buffer's aspect, mVA is the real scaled output aspect */
av_reduce(&mVA.num, &mVA.den, w * a.num, h * a.den, INT_MAX);
// mVA.num: 16 mVA.den: 9 w: 720 h: 576
// 16*576/720/9 = 1.42222
xscale = (double)mVA.num*h/(double)mVA.den/w;
xscale = (double)mVA.num * h / (double)mVA.den / w;
mVAchanged = true;
}
ClutterContent *fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), &(*buf)[0], COGL_PIXEL_FORMAT_BGR_888, w, h, w*3, NULL)) {
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), &(*buf)[0], COGL_PIXEL_FORMAT_BGR_888, w, h, w * 3, NULL))
{
hal_info("GLFB::%s clutter_image_set_data failed?\n", __func__);
_exit(1); /* life is hard */
}
@@ -484,10 +499,11 @@ void GLFbPC::bltDisplayBuffer()
int64_t vpts = buf->pts();
if (audioDecoder)
apts = audioDecoder->getPts();
if (apts != last_apts) {
if (apts != last_apts)
{
int rate, dummy1, dummy2;
if (apts < vpts)
sleep_us = (sleep_us * 2 + (vpts - apts)*10/9) / 3;
sleep_us = (sleep_us * 2 + (vpts - apts) * 10 / 9) / 3;
else if (sleep_us > 1000)
sleep_us -= 1000;
last_apts = apts;
@@ -502,5 +518,5 @@ void GLFbPC::bltDisplayBuffer()
sleep_us = 1;
}
hal_debug("vpts: 0x%" PRIx64 " apts: 0x%" PRIx64 " diff: %6.3f sleep_us %d buf %d\n",
buf->pts(), apts, (buf->pts() - apts)/90000.0, sleep_us, videoDecoder->buf_num);
buf->pts(), apts, (buf->pts() - apts) / 90000.0, sleep_us, videoDecoder->buf_num);
}


@@ -44,15 +44,16 @@ extern cVideo *videoDecoder;
#define hal_info_c(args...) _hal_info(HAL_DEBUG_DEMUX, NULL, args)
#define dmx_err(_errfmt, _errstr, _revents) do { \
hal_info("%s " _errfmt " fd:%d, ev:0x%x %s pid:0x%04hx flt:0x%02hx\n", \
__func__, _errstr, fd, _revents, DMX_T[dmx_type], pid, flt); \
} while(0);
hal_info("%s " _errfmt " fd:%d, ev:0x%x %s pid:0x%04hx flt:0x%02hx\n", \
__func__, _errstr, fd, _revents, DMX_T[dmx_type], pid, flt); \
} while(0);
cDemux *videoDemux = NULL;
cDemux *audioDemux = NULL;
//cDemux *pcrDemux = NULL;
static const char *DMX_T[] = {
static const char *DMX_T[] =
{
"DMX_INVALID",
"DMX_VIDEO",
"DMX_AUDIO",
@@ -64,7 +65,8 @@ static const char *DMX_T[] = {
};
/* map the device numbers. for now only demux0 is used */
static const char *devname[] = {
static const char *devname[] =
{
"/dev/dvb/adapter0/demux0",
"/dev/dvb/adapter0/demux0",
"/dev/dvb/adapter0/demux0"
@@ -97,7 +99,7 @@ cDemux::~cDemux()
bool cDemux::Open(DMX_CHANNEL_TYPE pes_type, void * /*hVideoBuffer*/, int uBufferSize)
{
int devnum = num;
int flags = O_RDWR|O_CLOEXEC;
int flags = O_RDWR | O_CLOEXEC;
if (fd > -1)
hal_info("%s FD ALREADY OPENED? fd = %d\n", __FUNCTION__, fd);
@@ -112,12 +114,12 @@ bool cDemux::Open(DMX_CHANNEL_TYPE pes_type, void * /*hVideoBuffer*/, int uBuffe
return false;
}
hal_debug("%s #%d pes_type: %s(%d), uBufferSize: %d fd: %d\n", __func__,
num, DMX_T[pes_type], pes_type, uBufferSize, fd);
num, DMX_T[pes_type], pes_type, uBufferSize, fd);
if (dmx_type == DMX_VIDEO_CHANNEL)
uBufferSize = 0x100000; /* 1MB */
uBufferSize = 0x100000; /* 1MB */
if (dmx_type == DMX_AUDIO_CHANNEL)
uBufferSize = 0x10000; /* 64k */
uBufferSize = 0x10000; /* 64k */
#if 0
if (!pesfds.empty())
{
@@ -191,17 +193,17 @@ int cDemux::Read(unsigned char *buff, int len, int timeout)
#if 0
if (len != 4095 && timeout != 100)
fprintf(stderr, "cDemux::%s #%d fd: %d type: %s len: %d timeout: %d\n",
__FUNCTION__, num, fd, DMX_T[dmx_type], len, timeout);
__FUNCTION__, num, fd, DMX_T[dmx_type], len, timeout);
#endif
int rc;
struct pollfd ufds;
ufds.fd = fd;
ufds.events = POLLIN|POLLPRI|POLLERR;
ufds.events = POLLIN | POLLPRI | POLLERR;
ufds.revents = 0;
if (timeout > 0)
{
retry:
retry:
rc = ::poll(&ufds, 1, timeout);
if (!rc)
return 0; // timeout
@@ -242,9 +244,9 @@ int cDemux::Read(unsigned char *buff, int len, int timeout)
return rc;
}
bool cDemux::sectionFilter(unsigned short _pid, const unsigned char * const filter,
const unsigned char * const mask, int len, int timeout,
const unsigned char * const negmask)
bool cDemux::sectionFilter(unsigned short _pid, const unsigned char *const filter,
const unsigned char *const mask, int len, int timeout,
const unsigned char *const negmask)
{
struct dmx_sct_filter_params s_flt;
memset(&s_flt, 0, sizeof(s_flt));
@@ -263,79 +265,80 @@ bool cDemux::sectionFilter(unsigned short _pid, const unsigned char * const filt
if (negmask != NULL)
memcpy(s_flt.filter.mode, negmask, len);
s_flt.flags = DMX_IMMEDIATE_START|DMX_CHECK_CRC;
s_flt.flags = DMX_IMMEDIATE_START | DMX_CHECK_CRC;
int to = 0;
switch (filter[0]) {
case 0x00: /* program_association_section */
to = 2000;
break;
case 0x01: /* conditional_access_section */
to = 6000;
break;
case 0x02: /* program_map_section */
to = 1500;
break;
case 0x03: /* transport_stream_description_section */
to = 10000;
break;
/* 0x04 - 0x3F: reserved */
case 0x40: /* network_information_section - actual_network */
to = 10000;
break;
case 0x41: /* network_information_section - other_network */
to = 15000;
break;
case 0x42: /* service_description_section - actual_transport_stream */
to = 10000;
break;
/* 0x43 - 0x45: reserved for future use */
case 0x46: /* service_description_section - other_transport_stream */
to = 10000;
break;
/* 0x47 - 0x49: reserved for future use */
case 0x4A: /* bouquet_association_section */
to = 11000;
break;
/* 0x4B - 0x4D: reserved for future use */
case 0x4E: /* event_information_section - actual_transport_stream, present/following */
to = 2000;
break;
case 0x4F: /* event_information_section - other_transport_stream, present/following */
to = 10000;
break;
/* 0x50 - 0x5F: event_information_section - actual_transport_stream, schedule */
/* 0x60 - 0x6F: event_information_section - other_transport_stream, schedule */
case 0x70: /* time_date_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
//s_flt.pid = 0x0014;
to = 30000;
break;
case 0x71: /* running_status_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x72: /* stuffing_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x73: /* time_offset_section */
//s_flt.pid = 0x0014;
to = 30000;
break;
/* 0x74 - 0x7D: reserved for future use */
case 0x7E: /* discontinuity_information_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x7F: /* selection_information_section */
to = 0;
break;
/* 0x80 - 0x8F: ca_message_section */
/* 0x90 - 0xFE: user defined */
/* 0xFF: reserved */
default:
break;
switch (filter[0])
{
case 0x00: /* program_association_section */
to = 2000;
break;
case 0x01: /* conditional_access_section */
to = 6000;
break;
case 0x02: /* program_map_section */
to = 1500;
break;
case 0x03: /* transport_stream_description_section */
to = 10000;
break;
/* 0x04 - 0x3F: reserved */
case 0x40: /* network_information_section - actual_network */
to = 10000;
break;
case 0x41: /* network_information_section - other_network */
to = 15000;
break;
case 0x42: /* service_description_section - actual_transport_stream */
to = 10000;
break;
/* 0x43 - 0x45: reserved for future use */
case 0x46: /* service_description_section - other_transport_stream */
to = 10000;
break;
/* 0x47 - 0x49: reserved for future use */
case 0x4A: /* bouquet_association_section */
to = 11000;
break;
/* 0x4B - 0x4D: reserved for future use */
case 0x4E: /* event_information_section - actual_transport_stream, present/following */
to = 2000;
break;
case 0x4F: /* event_information_section - other_transport_stream, present/following */
to = 10000;
break;
/* 0x50 - 0x5F: event_information_section - actual_transport_stream, schedule */
/* 0x60 - 0x6F: event_information_section - other_transport_stream, schedule */
case 0x70: /* time_date_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
//s_flt.pid = 0x0014;
to = 30000;
break;
case 0x71: /* running_status_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x72: /* stuffing_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x73: /* time_offset_section */
//s_flt.pid = 0x0014;
to = 30000;
break;
/* 0x74 - 0x7D: reserved for future use */
case 0x7E: /* discontinuity_information_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x7F: /* selection_information_section */
to = 0;
break;
/* 0x80 - 0x8F: ca_message_section */
/* 0x90 - 0xFE: user defined */
/* 0xFF: reserved */
default:
break;
// return -1;
}
/* the negmask == NULL is a hack: the users of negmask are PMT-update
@@ -345,15 +348,22 @@ bool cDemux::sectionFilter(unsigned short _pid, const unsigned char * const filt
s_flt.timeout = to;
hal_debug("%s #%d pid:0x%04hx fd:%d type:%s len:%d to:%d flags:%x flt[0]:%02x\n", __func__, num,
pid, fd, DMX_T[dmx_type], len, s_flt.timeout,s_flt.flags, s_flt.filter.filter[0]);
pid, fd, DMX_T[dmx_type], len, s_flt.timeout, s_flt.flags, s_flt.filter.filter[0]);
if (debuglevel == 2) {
fprintf(stderr,"filt: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.filter[i]);fprintf(stderr,"\n");
fprintf(stderr,"mask: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.mask [i]);fprintf(stderr,"\n");
fprintf(stderr,"mode: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.mode [i]);fprintf(stderr,"\n");
if (debuglevel == 2)
{
fprintf(stderr, "filt: ");
for (int i = 0; i < DMX_FILTER_SIZE; i++)fprintf(stderr, "%02hhx ", s_flt.filter.filter[i]);
fprintf(stderr, "\n");
fprintf(stderr, "mask: ");
for (int i = 0; i < DMX_FILTER_SIZE; i++)fprintf(stderr, "%02hhx ", s_flt.filter.mask [i]);
fprintf(stderr, "\n");
fprintf(stderr, "mode: ");
for (int i = 0; i < DMX_FILTER_SIZE; i++)fprintf(stderr, "%02hhx ", s_flt.filter.mode [i]);
fprintf(stderr, "\n");
}
ioctl (fd, DMX_STOP);
ioctl(fd, DMX_STOP);
if (ioctl(fd, DMX_SET_FILTER, &s_flt) < 0)
return false;
@@ -369,7 +379,7 @@ bool cDemux::pesFilter(const unsigned short _pid)
* this check originally is from tuxbox cvs but I'm not sure
* what it is good for...
if (pid <= 0x0001 && dmx_type != DMX_PCR_ONLY_CHANNEL)
return false;
return false;
*/
if ((pid >= 0x0002 && pid <= 0x000f) || pid >= 0x1fff)
return false;
@@ -381,35 +391,36 @@ bool cDemux::pesFilter(const unsigned short _pid)
p_flt.output = DMX_OUT_DECODER;
p_flt.input = DMX_IN_FRONTEND;
switch (dmx_type) {
case DMX_PCR_ONLY_CHANNEL:
p_flt.pes_type = DMX_PES_PCR;
if (HAL_nodec)
return true;
break;
case DMX_AUDIO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec) /* no need to demux if we don't decode... */
return true;
break;
case DMX_VIDEO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec)
return true;
break;
case DMX_PES_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TAP;
break;
case DMX_TP_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
break;
default:
hal_info("%s #%d invalid dmx_type %d!\n", __func__, num, dmx_type);
return false;
switch (dmx_type)
{
case DMX_PCR_ONLY_CHANNEL:
p_flt.pes_type = DMX_PES_PCR;
if (HAL_nodec)
return true;
break;
case DMX_AUDIO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec) /* no need to demux if we don't decode... */
return true;
break;
case DMX_VIDEO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec)
return true;
break;
case DMX_PES_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TAP;
break;
case DMX_TP_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
break;
default:
hal_info("%s #%d invalid dmx_type %d!\n", __func__, num, dmx_type);
return false;
}
return (ioctl(fd, DMX_SET_PES_FILTER, &p_flt) >= 0);
}
@@ -461,7 +472,8 @@ void cDemux::removePid(unsigned short Pid)
}
for (std::vector<pes_pids>::iterator i = pesfds.begin(); i != pesfds.end(); ++i)
{
if ((*i).pid == Pid) {
if ((*i).pid == Pid)
{
hal_debug("removePid: removing demux fd %d pid 0x%04x\n", fd, Pid);
if (ioctl(fd, DMX_REMOVE_PID, Pid) < 0)
hal_info("%s: (DMX_REMOVE_PID, 0x%04hx): %m\n", __func__, Pid);
@@ -472,7 +484,7 @@ void cDemux::removePid(unsigned short Pid)
hal_info("%s pid 0x%04x not found\n", __FUNCTION__, Pid);
}
void cDemux::getSTC(int64_t * STC)
void cDemux::getSTC(int64_t *STC)
{
int64_t pts = 0;
if (videoDecoder)


@@ -1,25 +1,25 @@
/*
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
openGL based framebuffer implementation
based on Carjay's neutrino-hd-dvbapi work, see
http://gitorious.org/neutrino-hd/neutrino-hd-dvbapi
openGL based framebuffer implementation
based on Carjay's neutrino-hd-dvbapi work, see
http://gitorious.org/neutrino-hd/neutrino-hd-dvbapi
TODO: AV-Sync code is "experimental" at best
TODO: AV-Sync code is "experimental" at best
*/
#include "config.h"
@@ -90,8 +90,8 @@ GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mS
*mX = x;
*mY = y;
av_reduce(&mOA.num, &mOA.den, x, y, INT_MAX);
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVAchanged = true;
mCrop = DISPLAY_AR_MODE_PANSCAN;
zoom = 1.0;
@@ -119,7 +119,7 @@ GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mS
unlink("/tmp/neutrino.input");
mkfifo("/tmp/neutrino.input", 0600);
input_fd = open("/tmp/neutrino.input", O_RDWR|O_CLOEXEC|O_NONBLOCK);
input_fd = open("/tmp/neutrino.input", O_RDWR | O_CLOEXEC | O_NONBLOCK);
if (input_fd < 0)
hal_info("%s: could not open /tmp/neutrino.input FIFO: %m\n", __func__);
initKeys();
@@ -221,12 +221,12 @@ void GLFramebuffer::run()
/* init the good stuff */
GLenum err = glewInit();
if(err == GLEW_OK)
if (err == GLEW_OK)
{
if((!GLEW_VERSION_1_5)||(!GLEW_EXT_pixel_buffer_object)||(!GLEW_ARB_texture_non_power_of_two))
if ((!GLEW_VERSION_1_5) || (!GLEW_EXT_pixel_buffer_object) || (!GLEW_ARB_texture_non_power_of_two))
{
hal_info("GLFB: Sorry, your graphics card is not supported. "
"Needs at least OpenGL 1.5, pixel buffer objects and NPOT textures.\n");
"Needs at least OpenGL 1.5, pixel buffer objects and NPOT textures.\n");
hal_info("incompatible graphics card: %m");
_exit(1); /* Life is hard */
}
@@ -262,7 +262,8 @@ void GLFbPC::setupCtx()
}
void GLFbPC::setupOSDBuffer()
{ /* the OSD buffer size can be decoupled from the actual
{
/* the OSD buffer size can be decoupled from the actual
window size since the GL can blit-stretch with no
trouble at all, ah, the luxury of ignorance... */
// mMutex.lock();
@@ -325,7 +326,7 @@ void GLFbPC::releaseGLObjects()
struct input_event ev;
if (key == 'f')
{
hal_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen?"off":"on");
hal_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen ? "off" : "on");
glfb_priv->mFullscreen = !(glfb_priv->mFullscreen);
glfb_priv->mReInit = true;
return;
@@ -364,7 +365,7 @@ int sleep_us = 30000;
void GLFbPC::render()
{
if(mShutDown)
if (mShutDown)
glutLeaveMainLoop();
mReInitLock.lock();
@@ -376,7 +377,8 @@ void GLFbPC::render()
mReInit = false;
mX = &_mX[mFullscreen];
mY = &_mY[mFullscreen];
if (mFullscreen) {
if (mFullscreen)
{
int x = glutGet(GLUT_SCREEN_WIDTH);
int y = glutGet(GLUT_SCREEN_HEIGHT);
*mX = x;
@@ -389,17 +391,18 @@ void GLFbPC::render()
xoff = (x - *mX) / 2;
yoff = (y - *mY) / 2;
glutFullScreen();
} else
}
else
*mX = *mY * mOA.num / mOA.den;
hal_info("%s: reinit mX:%d mY:%d xoff:%d yoff:%d fs %d\n",
__func__, *mX, *mY, xoff, yoff, mFullscreen);
__func__, *mX, *mY, xoff, yoff, mFullscreen);
glViewport(xoff, yoff, *mX, *mY);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect = static_cast<float>(*mX)/ *mY;
float aspect = static_cast<float>(*mX) / *mY;
float osdaspect = static_cast<float>(mOA.den) / mOA.num;
glOrtho(aspect*-osdaspect, aspect*osdaspect, -1.0, 1.0, -1.0, 1.0 );
glOrtho(aspect * -osdaspect, aspect * osdaspect, -1.0, 1.0, -1.0, 1.0);
glClearColor(0.0, 0.0, 0.0, 1.0);
glMatrixMode(GL_MODELVIEW);
@@ -414,7 +417,8 @@ void GLFbPC::render()
glutReshapeWindow(*mX, *mY);
bltDisplayBuffer(); /* decoded video stream */
if (mState.blit) {
if (mState.blit)
{
/* only blit manually after fb->blit(), this helps to find missed blit() calls */
mState.blit = false;
hal_debug("GLFB::%s blit!\n", __func__);
@@ -431,16 +435,18 @@ void GLFbPC::render()
xscale = 1.0;
int cmp = (mCrop == DISPLAY_AR_MODE_NONE) ? 0 : av_cmp_q(mVA, mOA);
const AVRational a149 = { 14, 9 };
switch (cmp) {
switch (cmp)
{
default:
case INT_MIN: /* invalid */
case 0: /* identical */
case INT_MIN: /* invalid */
case 0: /* identical */
hal_debug("%s: mVA == mOA (or fullscreen mode :-)\n", __func__);
break;
case 1: /* mVA > mOA -- video is wider than display */
case 1: /* mVA > mOA -- video is wider than display */
hal_debug("%s: mVA > mOA\n", __func__);
xscale = av_q2d(mVA) / av_q2d(mOA);
switch (mCrop) {
switch (mCrop)
{
case DISPLAY_AR_MODE_PANSCAN:
break;
case DISPLAY_AR_MODE_LETTERBOX:
@@ -453,18 +459,20 @@ void GLFbPC::render()
break;
}
break;
case -1: /* mVA < mOA -- video is taller than display */
case -1: /* mVA < mOA -- video is taller than display */
hal_debug("%s: mVA < mOA\n", __func__);
xscale = av_q2d(mVA) / av_q2d(mOA);
switch (mCrop) {
switch (mCrop)
{
case DISPLAY_AR_MODE_LETTERBOX:
break;
case DISPLAY_AR_MODE_PANSCAN2:
if (av_cmp_q(a149, mOA) < 0) {
if (av_cmp_q(a149, mOA) < 0)
{
zoom = av_q2d(mVA) * av_q2d(a149) / av_q2d(mOA);
break;
}
/* fallthrough for output format 14:9 */
/* fallthrough for output format 14:9 */
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mOA) / av_q2d(mVA);
break;
@@ -500,11 +508,15 @@ void GLFbPC::checkReinit(int x, int y)
static int last_x = 0, last_y = 0;
mReInitLock.lock();
if (!mFullscreen && !mReInit && (x != *mX || y != *mY)) {
if (x != *mX && abs(x - last_x) > 2) {
if (!mFullscreen && !mReInit && (x != *mX || y != *mY))
{
if (x != *mX && abs(x - last_x) > 2)
{
*mX = x;
*mY = *mX * mOA.den / mOA.num;
} else if (y != *mY && abs(y - last_y) > 2) {
}
else if (y != *mY && abs(y - last_y) > 2)
{
*mY = y;
*mX = *mY * mOA.num / mOA.den;
}
@@ -517,25 +529,29 @@ void GLFbPC::checkReinit(int x, int y)
void GLFbPC::drawSquare(float size, float x_factor)
{
GLfloat vertices[] = {
1.0f, 1.0f,
GLfloat vertices[] =
{
1.0f, 1.0f,
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, -1.0f,
1.0f, -1.0f,
};
GLubyte indices[] = { 0, 1, 2, 3 };
GLfloat texcoords[] = {
1.0, 0.0,
0.0, 0.0,
0.0, 1.0,
1.0, 1.0,
GLfloat texcoords[] =
{
1.0, 0.0,
0.0, 0.0,
0.0, 1.0,
1.0, 1.0,
};
if (x_factor > -99.0) { /* x_factor == -100 => OSD */
if (x_factor > -99.0) /* x_factor == -100 => OSD */
{
if (videoDecoder &&
videoDecoder->pig_x > 0 && videoDecoder->pig_y > 0 &&
videoDecoder->pig_w > 0 && videoDecoder->pig_h > 0) {
videoDecoder->pig_w > 0 && videoDecoder->pig_h > 0)
{
/* these calculations even consider cropping and panscan mode
* maybe this could be done with some clever opengl tricks? */
double w2 = (double)mState.width * 0.5l;
@@ -546,16 +562,17 @@ void GLFbPC::drawSquare(float size, float x_factor)
double h = (double)videoDecoder->pig_h / h2;
x += ((1.0l - x_factor * size) / 2.0l) * w / x_factor / size;
y += ((size - 1.0l) / 2.0l) * h / size;
vertices[0] = x + w; /* top right x */
vertices[1] = y; /* top right y */
vertices[2] = x; /* top left x */
vertices[3] = y; /* top left y */
vertices[4] = x; /* bottom left x */
vertices[5] = y - h; /* bottom left y */
vertices[6] = vertices[0]; /* bottom right x */
vertices[7] = vertices[5]; /* bottom right y */
vertices[0] = x + w; /* top right x */
vertices[1] = y; /* top right y */
vertices[2] = x; /* top left x */
vertices[3] = y; /* top left y */
vertices[4] = x; /* bottom left x */
vertices[5] = y - h; /* bottom left y */
vertices[6] = vertices[0]; /* bottom right x */
vertices[7] = vertices[5]; /* bottom right y */
}
} else
}
else
x_factor = 1.0; /* OSD */
glPushMatrix();
@@ -589,7 +606,8 @@ void GLFbPC::bltDisplayBuffer()
return;
static bool warn = true;
cVideo::SWFramebuffer *buf = videoDecoder->getDecBuf();
if (!buf) {
if (!buf)
{
if (warn)
hal_info("GLFB::%s did not get a buffer...\n", __func__);
warn = false;
@@ -601,7 +619,8 @@ void GLFbPC::bltDisplayBuffer()
return;
AVRational a = buf->AR();
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA)) {
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA))
{
_mVA = a;
/* _mVA is the raw buffer's aspect, mVA is the real scaled output aspect */
av_reduce(&mVA.num, &mVA.den, w * a.num, h * a.den, INT_MAX);
@@ -624,10 +643,11 @@ void GLFbPC::bltDisplayBuffer()
int64_t vpts = buf->pts() + 18000;
if (audioDecoder)
apts = audioDecoder->getPts();
if (apts != last_apts) {
if (apts != last_apts)
{
int rate, dummy1, dummy2;
if (apts < vpts)
sleep_us = (sleep_us * 2 + (vpts - apts)*10/9) / 3;
sleep_us = (sleep_us * 2 + (vpts - apts) * 10 / 9) / 3;
else if (sleep_us > 1000)
sleep_us -= 1000;
last_apts = apts;
@@ -642,5 +662,5 @@ void GLFbPC::bltDisplayBuffer()
sleep_us = 1;
}
hal_debug("vpts: 0x%" PRIx64 " apts: 0x%" PRIx64 " diff: %6.3f sleep_us %d buf %d\n",
buf->pts(), apts, (buf->pts() - apts)/90000.0, sleep_us, videoDecoder->buf_num);
buf->pts(), apts, (buf->pts() - apts) / 90000.0, sleep_us, videoDecoder->buf_num);
}


@@ -1,23 +1,23 @@
/*
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013,2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013,2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
********************************************************************
private stuff of the GLFB thread that is only used inside libstb-hal
and not exposed to the application.
********************************************************************
private stuff of the GLFB thread that is only used inside libstb-hal
and not exposed to the application.
*/
#ifndef __glfb_priv__
@@ -41,86 +41,108 @@ extern "C" {
class GLFbPC
{
public:
GLFbPC(int x, int y, std::vector<unsigned char> &buf);
~GLFbPC();
std::vector<unsigned char> *getOSDBuffer() { return osd_buf; } /* pointer to OSD bounce buffer */
int getOSDWidth() { return mState.width; }
int getOSDHeight() { return mState.height; }
void blit() { mState.blit = true; };
fb_var_screeninfo getScreenInfo() { return si; }
void setOutputFormat(AVRational a, int h, int c) { mOA = a; *mY = h; mCrop = c; mReInit = true; }
/* just make everything public for simplicity - this is only used inside libstb-hal anyway
private:
*/
fb_var_screeninfo si;
int *mX;
int *mY;
int _mX[2]; /* output window size */
int _mY[2]; /* [0] = normal, [1] = fullscreen */
AVRational mOA; /* output window aspect ratio */
AVRational mVA; /* video aspect ratio */
AVRational _mVA; /* for detecting changes in mVA */
bool mVAchanged;
float zoom; /* for cropping */
float xscale; /* and aspect ratio */
int mCrop; /* DISPLAY_AR_MODE */
public:
GLFbPC(int x, int y, std::vector<unsigned char> &buf);
~GLFbPC();
std::vector<unsigned char> *getOSDBuffer()
{
return osd_buf; /* pointer to OSD bounce buffer */
}
int getOSDWidth()
{
return mState.width;
}
int getOSDHeight()
{
return mState.height;
}
void blit()
{
mState.blit = true;
};
fb_var_screeninfo getScreenInfo()
{
return si;
}
void setOutputFormat(AVRational a, int h, int c)
{
mOA = a;
*mY = h;
mCrop = c;
mReInit = true;
}
/* just make everything public for simplicity - this is only used inside libstb-hal anyway
private:
*/
fb_var_screeninfo si;
int *mX;
int *mY;
int _mX[2]; /* output window size */
int _mY[2]; /* [0] = normal, [1] = fullscreen */
AVRational mOA; /* output window aspect ratio */
AVRational mVA; /* video aspect ratio */
AVRational _mVA; /* for detecting changes in mVA */
bool mVAchanged;
float zoom; /* for cropping */
float xscale; /* and aspect ratio */
int mCrop; /* DISPLAY_AR_MODE */
bool mFullscreen; /* fullscreen? */
bool mReInit; /* setup things for GL */
OpenThreads::Mutex mReInitLock;
bool mShutDown; /* if set main loop is left */
bool mInitDone; /* condition predicate */
// OpenThreads::Condition mInitCond; /* condition variable for init */
// mutable OpenThreads::Mutex mMutex; /* lock our data */
bool mFullscreen; /* fullscreen? */
bool mReInit; /* setup things for GL */
OpenThreads::Mutex mReInitLock;
bool mShutDown; /* if set main loop is left */
bool mInitDone; /* condition predicate */
// OpenThreads::Condition mInitCond; /* condition variable for init */
// mutable OpenThreads::Mutex mMutex; /* lock our data */
std::vector<unsigned char> *osd_buf; /* silly bounce buffer */
std::vector<unsigned char> *osd_buf; /* silly bounce buffer */
#if USE_OPENGL
std::map<unsigned char, int> mKeyMap;
std::map<int, int> mSpecialMap;
std::map<unsigned char, int> mKeyMap;
std::map<int, int> mSpecialMap;
#endif
#if USE_CLUTTER
std::map<int, int> mKeyMap;
std::map<int, int> mKeyMap;
#endif
int input_fd;
int64_t last_apts;
void run();
int input_fd;
int64_t last_apts;
void run();
static void rendercb(); /* callback for GLUT */
void render(); /* actual render function */
static void rendercb(); /* callback for GLUT */
void render(); /* actual render function */
#if USE_OPENGL
static void keyboardcb(unsigned char key, int x, int y);
static void specialcb(int key, int x, int y);
static void resizecb(int w, int h);
void checkReinit(int w, int h); /* e.g. in case window was resized */
void setupGLObjects(); /* PBOs, textures and stuff */
void releaseGLObjects();
void drawSquare(float size, float x_factor = 1); /* do not be square */
static void keyboardcb(unsigned char key, int x, int y);
static void specialcb(int key, int x, int y);
static void resizecb(int w, int h);
void checkReinit(int w, int h); /* e.g. in case window was resized */
void setupGLObjects(); /* PBOs, textures and stuff */
void releaseGLObjects();
void drawSquare(float size, float x_factor = 1); /* do not be square */
#endif
#if USE_CLUTTER
static bool keyboardcb(ClutterActor *actor, ClutterEvent *event, gpointer user_data);
static bool keyboardcb(ClutterActor *actor, ClutterEvent *event, gpointer user_data);
#endif
void initKeys(); /* setup key bindings for window */
void initKeys(); /* setup key bindings for window */
#if 0
void setupCtx(); /* create the window and make the context current */
void setupOSDBuffer(); /* create the OSD buffer */
void setupCtx(); /* create the window and make the context current */
void setupOSDBuffer(); /* create the OSD buffer */
#endif
struct {
int width; /* width and height, fixed for a framebuffer instance */
int height;
bool blit;
struct
{
int width; /* width and height, fixed for a framebuffer instance */
int height;
bool blit;
#if USE_OPENGL
GLuint osdtex; /* holds the OSD texture */
GLuint pbo; /* PBO we use for transfer to texture */
GLuint displaytex; /* holds the display texture */
GLuint displaypbo;
GLuint osdtex; /* holds the OSD texture */
GLuint pbo; /* PBO we use for transfer to texture */
GLuint displaytex; /* holds the display texture */
GLuint displaypbo;
#endif
} mState;
} mState;
void bltOSDBuffer();
void bltDisplayBuffer();
void bltOSDBuffer();
void bltDisplayBuffer();
};
#endif


@@ -29,7 +29,7 @@ hw_caps_t *get_hwcaps(void)
initialized = 1;
caps.can_cpufreq = 0;
caps.can_shutdown = 1; /* for testing */
caps.can_shutdown = 1; /* for testing */
caps.display_type = HW_DISPLAY_LINE_TEXT;
caps.has_HDMI = 1;
caps.display_xres = 8;
@@ -40,9 +40,10 @@ hw_caps_t *get_hwcaps(void)
strcpy(caps.startup_file, "");
strcpy(caps.boxvendor, "Generic");
strcpy(caps.boxname, "PC");
if (! uname(&u)){
if (! uname(&u))
{
strncpy(caps.boxarch, u.machine, sizeof(caps.boxarch));
caps.boxarch[sizeof(caps.boxarch)-1] = '\0';
caps.boxarch[sizeof(caps.boxarch) - 1] = '\0';
}
else
fprintf(stderr, "%s: uname() failed: %m\n", __func__);


@@ -18,7 +18,8 @@ void hal_api_init()
if (!initialized)
hal_debug_init();
hal_info("%s begin, initialized=%d, debug=0x%02x\n", __func__, (int)initialized, debuglevel);
if (! glfb) {
if (! glfb)
{
int x = 1280, y = 720; /* default OSD FB resolution */
/*
* export GLFB_RESOLUTION=720,576
@@ -28,7 +29,8 @@ void hal_api_init()
const char *p = NULL;
if (tmp)
p = strchr(tmp, ',');
if (p) {
if (p)
{
x = atoi(tmp);
y = atoi(p + 1);
}


@@ -2,7 +2,7 @@
#include "playback_lib.h"
static const char * FILENAME = "playback-dummy";
static const char *FILENAME = "playback-dummy";
bool cPlayback::Open(playmode_t)
{
@@ -15,13 +15,13 @@ void cPlayback::Close(void)
bool cPlayback::Start(std::string filename, std::string headers)
{
return Start((char*) filename.c_str(),0,0,0,0,0, headers);
return Start((char *) filename.c_str(), 0, 0, 0, 0, 0, headers);
}
bool cPlayback::Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string /*headers*/)
{
printf("%s:%s - filename=%s vpid=%u vtype=%d apid=%u ac3=%d duration=%i\n",
FILENAME, __func__, filename, vpid, vtype, apid, ac3, duration);
FILENAME, __func__, filename, vpid, vtype, apid, ac3, duration);
return true;
}
@@ -58,7 +58,7 @@ bool cPlayback::GetPosition(int &position, int &duration)
bool cPlayback::SetPosition(int position, bool)
{
printf("%s:%s %d\n", FILENAME, __func__,position);
printf("%s:%s %d\n", FILENAME, __func__, position);
return true;
}


@@ -46,61 +46,67 @@ struct AVFormatContext;
class cPlayback
{
private:
bool playing, first;
bool decoders_closed;
private:
bool playing, first;
bool decoders_closed;
int mSpeed;
int mAudioStream;
int init_jump;
int mSpeed;
int mAudioStream;
int init_jump;
public:
playstate_t playstate;
public:
playstate_t playstate;
cPlayback(int);
bool Open(playmode_t PlayMode);
void Close(void);
bool Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string headers = "");
bool Start(std::string filename, std::string headers = "");
bool Play(void);
bool SyncAV(void);
cPlayback(int);
bool Open(playmode_t PlayMode);
void Close(void);
bool Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string headers = "");
bool Start(std::string filename, std::string headers = "");
bool Play(void);
bool SyncAV(void);
bool Stop(void);
bool SetAPid(int pid, bool ac3);
bool SetSubtitlePid(int pid);
bool SetTeletextPid(int pid);
bool Stop(void);
bool SetAPid(int pid, bool ac3);
bool SetSubtitlePid(int pid);
bool SetTeletextPid(int pid);
void trickSeek(int ratio);
bool SetSpeed(int speed);
bool SetSlow(int slow);
bool GetSpeed(int &speed) const;
bool GetPosition(int &position, int &duration);
void GetPts(uint64_t &pts);
int GetAPid(void);
int GetVPid(void);
int GetSubtitlePid(void);
bool SetPosition(int position, bool absolute = false);
void FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language);
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language) { FindAllPids((int*) apids, (unsigned int*) ac3flags, (unsigned int*) numpida, language); };
void FindAllSubtitlePids(int *pids, unsigned int *numpids, std::string *language);
void FindAllTeletextsubtitlePids(int *pids, unsigned int *numpidt, std::string *tlanguage, int *mags, int *pages);
void RequestAbort(void);
void FindAllSubs(int *pids, unsigned int *supported, unsigned int *numpida, std::string *language);
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language) { FindAllSubs((int*) pids, (unsigned int*) supported, (unsigned int*) numpida, language); };
bool SelectSubtitles(int pid, std::string charset = "");
void GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current);
void SetTitle(int title);
uint64_t GetReadCount(void);
void GetChapters(std::vector<int> &positions, std::vector<std::string> &titles);
void GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values);
AVFormatContext *GetAVFormatContext();
void ReleaseAVFormatContext();
std::string extra_headers;
std::string user_agent;
void trickSeek(int ratio);
bool SetSpeed(int speed);
bool SetSlow(int slow);
bool GetSpeed(int &speed) const;
bool GetPosition(int &position, int &duration);
void GetPts(uint64_t &pts);
int GetAPid(void);
int GetVPid(void);
int GetSubtitlePid(void);
bool SetPosition(int position, bool absolute = false);
void FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language);
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language)
{
FindAllPids((int *) apids, (unsigned int *) ac3flags, (unsigned int *) numpida, language);
};
void FindAllSubtitlePids(int *pids, unsigned int *numpids, std::string *language);
void FindAllTeletextsubtitlePids(int *pids, unsigned int *numpidt, std::string *tlanguage, int *mags, int *pages);
void RequestAbort(void);
void FindAllSubs(int *pids, unsigned int *supported, unsigned int *numpida, std::string *language);
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language)
{
FindAllSubs((int *) pids, (unsigned int *) supported, (unsigned int *) numpida, language);
};
bool SelectSubtitles(int pid, std::string charset = "");
void GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current);
void SetTitle(int title);
uint64_t GetReadCount(void);
void GetChapters(std::vector<int> &positions, std::vector<std::string> &titles);
void GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values);
AVFormatContext *GetAVFormatContext();
void ReleaseAVFormatContext();
std::string extra_headers;
std::string user_agent;
//
~cPlayback();
void getMeta();
//
~cPlayback();
void getMeta();
};
#endif // __PLAYBACK_GST_H__
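A minimal usage sketch of the interface declared above (hypothetical caller; assumes the playmode_t enum from the generic playback header later in this commit, the include name is an assumption, and error handling is omitted):
#include <string>
#include "playback_gst.h" /* assumed include name for the header above */
static void play_url(cPlayback &pb, const std::string &url)
{
	pb.Open(PLAYMODE_FILE);             /* playmode_t, see the generic header */
	if (!pb.Start(url))                 /* Start(std::string, headers = "") */
		return;
	int position = 0, duration = 0;
	pb.GetPosition(position, duration); /* both reported in milliseconds */
	pb.SetSpeed(2);                     /* 0 = pause, 1 = play, >1 = ff, <0 = rewind */
	pb.Stop();
	pb.Close();
}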

View File

@@ -43,7 +43,7 @@
#define hal_debug_c(args...) _hal_debug(HAL_DEBUG_PLAYBACK, NULL, args)
#define hal_info_c(args...) _hal_info(HAL_DEBUG_PLAYBACK, NULL, args)
static const char * FILENAME = "[playback.cpp]";
static const char *FILENAME = "[playback.cpp]";
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
@@ -63,86 +63,86 @@ typedef enum
} GstPlayFlags;
GstElement * m_gst_playbin = NULL;
GstElement * audioSink = NULL;
GstElement * videoSink = NULL;
gchar * uri = NULL;
GstTagList * m_stream_tags = 0;
GstElement *m_gst_playbin = NULL;
GstElement *audioSink = NULL;
GstElement *videoSink = NULL;
gchar *uri = NULL;
GstTagList *m_stream_tags = 0;
static int end_eof = 0;
extern GLFramebuffer *glfb;
gint match_sinktype(GstElement *element, gpointer type)
{
return strcmp(g_type_name(G_OBJECT_TYPE(element)), (const char*)type);
return strcmp(g_type_name(G_OBJECT_TYPE(element)), (const char *)type);
}
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
GstBusSyncReply Gst_bus_call(GstBus *bus, GstMessage *msg, gpointer user_data)
{
gchar * sourceName;
gchar *sourceName;
// source
GstObject * source;
GstObject *source;
source = GST_MESSAGE_SRC(msg);
if (!GST_IS_OBJECT(source))
return GST_BUS_DROP;
sourceName = gst_object_get_name(source);
switch (GST_MESSAGE_TYPE(msg))
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
case GST_MESSAGE_EOS:
{
g_message("End-of-stream");
end_eof = 1;
break;
}
case GST_MESSAGE_ERROR:
case GST_MESSAGE_ERROR:
{
gchar * debug;
gchar *debug;
GError *err;
gst_message_parse_error(msg, &err, &debug);
g_free (debug);
hal_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
if ( err->domain == GST_STREAM_ERROR )
g_free(debug);
hal_info_c("%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName);
if (err->domain == GST_STREAM_ERROR)
{
if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
if (err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
{
if ( g_strrstr(sourceName, "videosink") )
hal_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
else if ( g_strrstr(sourceName, "audiosink") )
hal_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
if (g_strrstr(sourceName, "videosink"))
hal_info_c("%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
else if (g_strrstr(sourceName, "audiosink"))
hal_info_c("%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
}
}
g_error_free(err);
end_eof = 1; // NOTE: just to exit
end_eof = 1; // NOTE: just to exit
break;
}
case GST_MESSAGE_INFO:
{
gchar *debug;
GError *inf;
gst_message_parse_info (msg, &inf, &debug);
g_free (debug);
if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
gst_message_parse_info(msg, &inf, &debug);
g_free(debug);
if (inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE)
{
if ( g_strrstr(sourceName, "videosink") )
hal_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
if (g_strrstr(sourceName, "videosink"))
hal_info_c("%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
}
g_error_free(inf);
break;
}
case GST_MESSAGE_TAG:
{
GstTagList *tags, *result;
gst_message_parse_tag(msg, &tags);
result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
if (result)
{
@@ -150,45 +150,45 @@ GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
gst_tag_list_free(m_stream_tags);
m_stream_tags = result;
}
const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
if ( gv_image )
if (gv_image)
{
GstBuffer *buf_image;
buf_image = gst_value_get_buffer (gv_image);
int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
if(fd >= 0)
buf_image = gst_value_get_buffer(gv_image);
int fd = open("/tmp/.id3coverart", O_CREAT | O_WRONLY | O_TRUNC, 0644);
if (fd >= 0)
{
int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
close(fd);
hal_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
hal_info_c("%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__, ret);
}
//FIXME: how shall playback handle this event???
}
gst_tag_list_free(tags);
hal_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
hal_info_c("%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
break;
}
case GST_MESSAGE_STATE_CHANGED:
{
if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
if (GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
break;
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
if(old_state == new_state)
if (old_state == new_state)
break;
hal_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
hal_info_c("%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
switch(transition)
switch (transition)
{
case GST_STATE_CHANGE_NULL_TO_READY:
{
} break;
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
GstIterator *children;
@@ -197,7 +197,7 @@ GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
gst_object_unref(GST_OBJECT(audioSink));
audioSink = NULL;
}
if (videoSink)
{
gst_object_unref(GST_OBJECT(videoSink));
@@ -207,14 +207,15 @@ GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
gst_iterator_free(children);
} break;
}
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
{
} break;
} break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
{
} break;
} break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
{
if (audioSink)
@@ -227,26 +228,27 @@ GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
gst_object_unref(GST_OBJECT(videoSink));
videoSink = NULL;
}
} break;
}
break;
case GST_STATE_CHANGE_READY_TO_NULL:
{
} break;
} break;
}
break;
}
#if 0
case GST_MESSAGE_ELEMENT:
{
if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id"))
if (gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id"))
{
// set window id
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC(msg)), glfb->getWindowID());
// reshape window
gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC(msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
// sync frames
gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC(msg)));
}
}
#endif
@@ -260,25 +262,25 @@ GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
cPlayback::cPlayback(int num)
{
hal_info( "%s:%s\n", FILENAME, __FUNCTION__);
const gchar *nano_str;
guint major, minor, micro, nano;
{
hal_info("%s:%s\n", FILENAME, __FUNCTION__);
const gchar *nano_str;
guint major, minor, micro, nano;
gst_init(NULL, NULL);
gst_version (&major, &minor, &micro, &nano);
gst_version(&major, &minor, &micro, &nano);
if (nano == 1)
nano_str = "(CVS)";
else if (nano == 2)
nano_str = "(Prerelease)";
if (nano == 1)
nano_str = "(CVS)";
else if (nano == 2)
nano_str = "(Prerelease)";
else
nano_str = "";
nano_str = "";
hal_info( "%s:%s - This program is linked against GStreamer %d.%d.%d %s\n",
FILENAME, __FUNCTION__,
major, minor, micro, nano_str);
hal_info("%s:%s - This program is linked against GStreamer %d.%d.%d %s\n",
FILENAME, __FUNCTION__,
major, minor, micro, nano_str);
mAudioStream = 0;
mSpeed = 0;
@@ -288,8 +290,8 @@ cPlayback::cPlayback(int num)
}
cPlayback::~cPlayback()
{
hal_info( "%s:%s\n", FILENAME, __FUNCTION__);
{
hal_info("%s:%s\n", FILENAME, __FUNCTION__);
//FIXME: all deleting stuff is done in Close()
}
@@ -302,21 +304,21 @@ bool cPlayback::Open(playmode_t PlayMode)
// used by movieplay
void cPlayback::Close(void)
{
hal_info( "%s:%s\n", FILENAME, __FUNCTION__);
{
hal_info("%s:%s\n", FILENAME, __FUNCTION__);
Stop();
// disconnect bus handler
if (m_gst_playbin)
{
// disconnect sync handler callback
GstBus * bus = gst_pipeline_get_bus(GST_PIPELINE (m_gst_playbin));
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_gst_playbin));
gst_bus_set_sync_handler(bus, NULL, NULL);
gst_object_unref(bus);
hal_info( "%s:%s - GST bus handler closed\n", FILENAME, __FUNCTION__);
hal_info("%s:%s - GST bus handler closed\n", FILENAME, __FUNCTION__);
}
if (m_stream_tags)
gst_tag_list_free(m_stream_tags);
@@ -327,22 +329,22 @@ void cPlayback::Close(void)
{
gst_object_unref(GST_OBJECT(audioSink));
audioSink = NULL;
hal_info( "%s:%s - GST audio Sink closed\n", FILENAME, __FUNCTION__);
hal_info("%s:%s - GST audio Sink closed\n", FILENAME, __FUNCTION__);
}
if (videoSink)
{
gst_object_unref(GST_OBJECT(videoSink));
videoSink = NULL;
hal_info( "%s:%s - GST video Sink closed\n", FILENAME, __FUNCTION__);
hal_info("%s:%s - GST video Sink closed\n", FILENAME, __FUNCTION__);
}
// unref m_gst_playbin
gst_object_unref (GST_OBJECT (m_gst_playbin));
hal_info( "%s:%s - GST playbin closed\n", FILENAME, __FUNCTION__);
gst_object_unref(GST_OBJECT(m_gst_playbin));
hal_info("%s:%s - GST playbin closed\n", FILENAME, __FUNCTION__);
m_gst_playbin = NULL;
}
}
@@ -350,80 +352,80 @@ void cPlayback::Close(void)
// start
bool cPlayback::Start(std::string filename, std::string headers)
{
return Start((char*) filename.c_str(),0,0,0,0,0, headers);
return Start((char *) filename.c_str(), 0, 0, 0, 0, 0, headers);
}
bool cPlayback::Start(char *filename, int /*vpid*/, int /*vtype*/, int /*apid*/, int /*ac3*/, int /*duration*/, std::string headers)
{
hal_info( "%s:%s\n", FILENAME, __FUNCTION__);
hal_info("%s:%s\n", FILENAME, __FUNCTION__);
mAudioStream = 0;
//create playback path
char file[400] = {""};
bool isHTTP = false;
if(!strncmp("http://", filename, 7))
if (!strncmp("http://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("file://", filename, 7))
else if (!strncmp("file://", filename, 7))
{
isHTTP = false;
}
else if(!strncmp("upnp://", filename, 7))
else if (!strncmp("upnp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("rtmp://", filename, 7))
else if (!strncmp("rtmp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("rtsp://", filename, 7))
else if (!strncmp("rtsp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("mms://", filename, 6))
else if (!strncmp("mms://", filename, 6))
{
isHTTP = true;
}
else
strcat(file, "file://");
strcat(file, "file://");
strcat(file, filename);
if (isHTTP)
uri = g_uri_escape_string(filename, G_URI_RESERVED_CHARS_GENERIC_DELIMITERS, true);
else
uri = g_filename_to_uri(filename, NULL, NULL);
hal_info("%s:%s - filename=%s\n", FILENAME, __FUNCTION__, filename);
// create gst pipeline
m_gst_playbin = gst_element_factory_make("playbin2", "playbin");
if(m_gst_playbin)
if (m_gst_playbin)
{
hal_info("%s:%s - m_gst_playbin\n", FILENAME, __FUNCTION__);
guint flags;
g_object_get(G_OBJECT (m_gst_playbin), "flags", &flags, NULL);
g_object_get(G_OBJECT(m_gst_playbin), "flags", &flags, NULL);
/* avoid video conversion, let the (hardware) sinks handle that */
flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
/* volume control is done by hardware */
flags &= ~GST_PLAY_FLAG_SOFT_VOLUME;
g_object_set(G_OBJECT (m_gst_playbin), "uri", uri, NULL);
g_object_set(G_OBJECT (m_gst_playbin), "flags", flags, NULL);
g_object_set(G_OBJECT(m_gst_playbin), "uri", uri, NULL);
g_object_set(G_OBJECT(m_gst_playbin), "flags", flags, NULL);
//gstbus handler
GstBus * bus = gst_pipeline_get_bus( GST_PIPELINE(m_gst_playbin) );
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_gst_playbin));
gst_bus_set_sync_handler(bus, Gst_bus_call, NULL);
gst_object_unref(bus);
gst_object_unref(bus);
// state playing
gst_element_set_state(GST_ELEMENT(m_gst_playbin), GST_STATE_PLAYING);
playing = true;
playstate = STATE_PLAY;
}
@@ -431,32 +433,32 @@ bool cPlayback::Start(char *filename, int /*vpid*/, int /*vtype*/, int /*apid*/,
{
hal_info("%s:%s - failed to create GStreamer pipeline!, sorry we can not play\n", FILENAME, __FUNCTION__);
playing = false;
return false;
}
g_free(uri);
g_free(uri);
// set buffer size
/* increase the default 2 second / 2 MB buffer limitations to 5s / 5MB */
int m_buffer_size = 5*1024*1024;
int m_buffer_size = 5 * 1024 * 1024;
//g_object_set(G_OBJECT(m_gst_playbin), "buffer-duration", 5LL * GST_SECOND, NULL);
g_object_set(G_OBJECT(m_gst_playbin), "buffer-size", m_buffer_size, NULL);
return true;
}
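Condensed, the pipeline setup in Start() comes down to the sketch below (GStreamer 0.10 "playbin2" API as used in this file; the GST_PLAY_FLAG_* values come from the GstPlayFlags enum defined near the top of the file, error paths omitted):
#include <gst/gst.h>
static GstElement *make_playbin(const gchar *uri_str)
{
	GstElement *pb = gst_element_factory_make("playbin2", "playbin");
	if (!pb)
		return NULL;
	guint flags = 0;
	g_object_get(G_OBJECT(pb), "flags", &flags, NULL);
	flags |= GST_PLAY_FLAG_NATIVE_VIDEO;  /* hand raw video to the (hardware) sink */
	flags &= ~GST_PLAY_FLAG_SOFT_VOLUME;  /* volume control is done in hardware */
	g_object_set(G_OBJECT(pb), "uri", uri_str, "flags", flags, NULL);
	g_object_set(G_OBJECT(pb), "buffer-size", 5 * 1024 * 1024, NULL); /* 5 MB instead of the 2 MB default */
	gst_element_set_state(pb, GST_STATE_PLAYING);
	return pb;
}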
bool cPlayback::Play(void)
{
hal_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
hal_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == true)
if (playing == true)
return true;
if(m_gst_playbin)
if (m_gst_playbin)
{
gst_element_set_state(GST_ELEMENT(m_gst_playbin), GST_STATE_PLAYING);
playing = true;
playstate = STATE_PLAY;
}
@@ -466,35 +468,35 @@ bool cPlayback::Play(void)
}
bool cPlayback::Stop(void)
{
if(playing == false)
{
if (playing == false)
return false;
hal_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
hal_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
// stop
if(m_gst_playbin)
if (m_gst_playbin)
{
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
}
playing = false;
hal_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
hal_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
playstate = STATE_STOP;
return true;
}
bool cPlayback::SetAPid(int pid , bool /*ac3*/)
bool cPlayback::SetAPid(int pid, bool /*ac3*/)
{
hal_info("%s: pid %i\n", __func__, pid);
int current_audio;
if(pid != mAudioStream)
if (pid != mAudioStream)
{
g_object_set (G_OBJECT (m_gst_playbin), "current-audio", pid, NULL);
g_object_set(G_OBJECT(m_gst_playbin), "current-audio", pid, NULL);
printf("%s: switched to audio stream %i\n", __FUNCTION__, pid);
mAudioStream = pid;
}
@@ -508,18 +510,18 @@ void cPlayback::trickSeek(int ratio)
gint64 pos = 0;
int position;
int duration;
if( GetPosition(position, duration) )
if (GetPosition(position, duration))
{
validposition = true;
pos = position;
}
gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
if (validposition)
{
if(ratio >= 0.0)
if (ratio >= 0.0)
gst_element_seek(m_gst_playbin, ratio, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP), GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, -1);
else
gst_element_seek(m_gst_playbin, ratio, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP), GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos);
@@ -527,23 +529,23 @@ void cPlayback::trickSeek(int ratio)
}
bool cPlayback::SetSpeed(int speed)
{
hal_info( "%s:%s speed %d\n", FILENAME, __FUNCTION__, speed);
{
hal_info("%s:%s speed %d\n", FILENAME, __FUNCTION__, speed);
if(playing == false)
if (playing == false)
return false;
if(m_gst_playbin)
{
if (m_gst_playbin)
{
// pause
if(speed == 0)
if (speed == 0)
{
gst_element_set_state(m_gst_playbin, GST_STATE_PAUSED);
//trickSeek(0);
playstate = STATE_PAUSE;
}
// play/continue
else if(speed == 1)
else if (speed == 1)
{
trickSeek(1);
//gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
@@ -551,14 +553,14 @@ bool cPlayback::SetSpeed(int speed)
playstate = STATE_PLAY;
}
//ff
else if(speed > 1)
else if (speed > 1)
{
trickSeek(speed);
//
playstate = STATE_FF;
}
//rf
else if(speed < 0)
else if (speed < 0)
{
trickSeek(speed);
//
@@ -572,13 +574,13 @@ bool cPlayback::SetSpeed(int speed)
}
bool cPlayback::SetSlow(int slow)
{
hal_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
{
hal_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == false)
if (playing == false)
return false;
if(m_gst_playbin)
if (m_gst_playbin)
{
trickSeek(0.5);
}
@@ -600,27 +602,27 @@ bool cPlayback::GetSpeed(int &speed) const
// in milliseconds
bool cPlayback::GetPosition(int &position, int &duration)
{
if(playing == false)
return false;
if (playing == false)
return false;
//EOF
if(end_eof)
if (end_eof)
{
end_eof = 0;
return false;
}
if(m_gst_playbin)
if (m_gst_playbin)
{
//position
GstFormat fmt = GST_FORMAT_TIME; //Returns time in nanosecs
gint64 pts = 0;
unsigned long long int sec = 0;
gst_element_query_position(m_gst_playbin, &fmt, &pts);
position = pts / 1000000.0;
// duration
GstFormat fmt_d = GST_FORMAT_TIME; //Returns time in nanosecs
double length = 0;
@@ -628,94 +630,94 @@ bool cPlayback::GetPosition(int &position, int &duration)
gst_element_query_duration(m_gst_playbin, &fmt_d, &len);
length = len / 1000000.0;
if(length < 0)
if (length < 0)
length = 0;
duration = (int)(length);
}
return true;
}
bool cPlayback::SetPosition(int position, bool absolute)
{
hal_info("%s: pos %d abs %d playing %d\n", __func__, position, absolute, playing);
if(playing == false)
if (playing == false)
return false;
gint64 time_nanoseconds;
gint64 pos;
GstFormat fmt = GST_FORMAT_TIME;
if(m_gst_playbin)
if (m_gst_playbin)
{
gst_element_query_position(m_gst_playbin, &fmt, &pos);
time_nanoseconds = pos + (position * 1000000.0);
if(time_nanoseconds < 0)
if (time_nanoseconds < 0)
time_nanoseconds = 0;
gst_element_seek(m_gst_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, time_nanoseconds, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
}
return true;
}
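GetPosition() and SetPosition() above both work on GStreamer's nanosecond clock while the interface uses milliseconds; a small helper showing the same conversion and clamping (sketch only):
#include <glib.h>
static gint64 relative_seek_target(gint64 current_ns, int offset_ms)
{
	/* GST_FORMAT_TIME positions are nanoseconds, the cPlayback interface
	 * reports and accepts milliseconds, hence the factor of 1000000 */
	gint64 target = current_ns + (gint64)offset_ms * 1000000;
	return (target < 0) ? 0 : target; /* clamp to stream start as SetPosition() does */
}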
void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string * language)
{
hal_info( "%s:%s\n", FILENAME, __FUNCTION__);
void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language)
{
hal_info("%s:%s\n", FILENAME, __FUNCTION__);
if(m_gst_playbin)
if (m_gst_playbin)
{
gint i, n_audio = 0;
//GstStructure * structure = NULL;
// get audio
g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
g_object_get(m_gst_playbin, "n-audio", &n_audio, NULL);
printf("%s: %d audio\n", __FUNCTION__, n_audio);
if(n_audio == 0)
if (n_audio == 0)
return;
for (i = 0; i < n_audio; i++)
{
// apids
apids[i]=i;
GstPad * pad = 0;
g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
GstCaps * caps = gst_pad_get_negotiated_caps(pad);
apids[i] = i;
GstPad *pad = 0;
g_signal_emit_by_name(m_gst_playbin, "get-audio-pad", i, &pad);
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
if (!caps)
continue;
GstStructure * structure = gst_caps_get_structure(caps, 0);
GstStructure *structure = gst_caps_get_structure(caps, 0);
//const gchar *g_type = gst_structure_get_name(structure);
//if (!structure)
//return atUnknown;
//return atUnknown;
//ac3flags[0] = 0;
// ac3flags
if ( gst_structure_has_name (structure, "audio/mpeg"))
if (gst_structure_has_name(structure, "audio/mpeg"))
{
gint mpegversion, layer = -1;
if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
if (!gst_structure_get_int(structure, "mpegversion", &mpegversion))
//return atUnknown;
ac3flags[i] = 0;
switch (mpegversion)
switch (mpegversion)
{
case 1:
/*
{
gst_structure_get_int (structure, "layer", &layer);
if ( layer == 3 )
return atMP3;
else
return atMPEG;
ac3flags[0] = 4;
break;
gst_structure_get_int (structure, "layer", &layer);
if ( layer == 3 )
return atMP3;
else
return atMPEG;
ac3flags[0] = 4;
break;
}
*/
ac3flags[i] = 4;
@@ -730,35 +732,35 @@ void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *nu
ac3flags[i] = 0;
}
}
else if ( gst_structure_has_name (structure, "audio/x-ac3") || gst_structure_has_name (structure, "audio/ac3") )
else if (gst_structure_has_name(structure, "audio/x-ac3") || gst_structure_has_name(structure, "audio/ac3"))
//return atAC3;
ac3flags[i] = 1;
else if ( gst_structure_has_name (structure, "audio/x-dts") || gst_structure_has_name (structure, "audio/dts") )
else if (gst_structure_has_name(structure, "audio/x-dts") || gst_structure_has_name(structure, "audio/dts"))
//return atDTS;
ac3flags[i] = 6;
else if ( gst_structure_has_name (structure, "audio/x-raw-int") )
else if (gst_structure_has_name(structure, "audio/x-raw-int"))
//return atPCM;
ac3flags[i] = 0;
gst_caps_unref(caps);
}
// numpids
*numpida=i;
*numpida = i;
}
}
void cPlayback::getMeta()
{
if(playing)
if (playing)
return;
}
bool cPlayback::SyncAV(void)
{
hal_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
hal_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == false )
if (playing == false)
return false;
return true;

File diff suppressed because it is too large

View File

@@ -5,10 +5,11 @@
#include <stdint.h>
#include <vector>
typedef enum {
typedef enum
{
PLAYMODE_TS = 0,
PLAYMODE_FILE,
} playmode_t;
} playmode_t;
struct AVFormatContext;
class cPlayback
@@ -27,9 +28,15 @@ class cPlayback
bool SetAPid(int pid, bool ac3);
bool SetSubtitlePid(int pid);
bool SetTeletextPid(int pid);
int GetAPid(void) { return mAudioStream; }
int GetAPid(void)
{
return mAudioStream;
}
int GetVPid(void);
int GetSubtitlePid(void) { return mSubtitleStream; }
int GetSubtitlePid(void)
{
return mSubtitleStream;
}
int GetTeletextPid(void);
void SuspendSubtitle(bool);
int GetFirstTeletextPid(void);
@@ -39,14 +46,23 @@ class cPlayback
void GetPts(uint64_t &pts);
bool SetPosition(int position, bool absolute = false);
void FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language);
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language) { FindAllPids((int*) apids, (unsigned int*) ac3flags, (unsigned int*) numpida, language); };
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language)
{
FindAllPids((int *) apids, (unsigned int *) ac3flags, (unsigned int *) numpida, language);
};
void FindAllSubtitlePids(int *pids, unsigned int *numpids, std::string *language);
void FindAllTeletextsubtitlePids(int *pids, unsigned int *numpidt, std::string *tlanguage, int *mags, int *pages);
void RequestAbort(void);
bool IsPlaying(void) { return false; }
bool IsPlaying(void)
{
return false;
}
uint64_t GetReadCount(void);
void FindAllSubs(int *pids, unsigned int *supported, unsigned int *numpida, std::string *language);
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language) { FindAllSubs((int*) pids, (unsigned int*) supported, (unsigned int*) numpida, language); };
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language)
{
FindAllSubs((int *) pids, (unsigned int *) supported, (unsigned int *) numpida, language);
};
bool SelectSubtitles(int pid, std::string charset = "");
void GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current);
void SetTitle(int title);
@@ -54,7 +70,10 @@ class cPlayback
void GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values);
//
~cPlayback();
AVFormatContext *GetAVFormatContext(){ return NULL; }
AVFormatContext *GetAVFormatContext()
{
return NULL;
}
void ReleaseAVFormatContext() {}
};

View File

@@ -65,13 +65,14 @@ extern bool HAL_nodec;
static uint8_t *dmxbuf;
static int bufpos;
static const AVRational aspect_ratios[6] = {
static const AVRational aspect_ratios[6] =
{
{ 1, 1 },
{ 4, 3 },
{ 14, 9 },
{ 16, 9 },
{ 20, 9 },
{ -1,-1 }
{ -1, -1 }
};
cVideo::cVideo(int, void *, void *, unsigned int)
@@ -133,15 +134,15 @@ int cVideo::getAspectRatio(void)
ar = w * 100 * a.num / h / a.den;
if (ar < 100 || ar > 225) /* < 4:3, > 20:9 */
; /* ret = 0: N/A */
else if (ar < 140) /* 4:3 */
else if (ar < 140) /* 4:3 */
ret = 1;
else if (ar < 165) /* 14:9 */
else if (ar < 165) /* 14:9 */
ret = 2;
else if (ar < 200) /* 16:9 */
else if (ar < 200) /* 16:9 */
ret = 3;
else
ret = 4; /* 20:9 */
out:
ret = 4; /* 20:9 */
out:
buf_m.unlock();
return ret;
}
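The classification above works entirely in integers: ar is the display aspect ratio scaled by 100. Two worked examples, assuming the sample aspect ratios given:
/* 1280x720 with square pixels (SAR 1:1):
 *   ar = 1280 * 100 * 1 / 720 / 1 = 177  ->  165 <= 177 < 200  ->  ret = 3 (16:9)
 * 720x576 PAL with 16:15 SAR:
 *   ar = 720 * 100 * 16 / 576 / 15 = 133  ->  133 < 140  ->  ret = 1 (4:3) */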
@@ -163,7 +164,8 @@ int cVideo::Start(void *, unsigned short, unsigned short, void *)
int cVideo::Stop(bool)
{
hal_debug("%s running %d >\n", __func__, thread_running);
if (thread_running) {
if (thread_running)
{
thread_running = false;
OpenThreads::Thread::join();
}
@@ -180,9 +182,9 @@ int cVideo::GetVideoSystem()
{
int current_video_system = VIDEO_STD_1080I50;
if(dec_w < 720)
if (dec_w < 720)
current_video_system = VIDEO_STD_PAL;
else if(dec_w > 720 && dec_w <= 1280)
else if (dec_w > 720 && dec_w <= 1280)
current_video_system = VIDEO_STD_720P50;
return current_video_system;
@@ -191,7 +193,7 @@ int cVideo::GetVideoSystem()
int cVideo::SetVideoSystem(int system, bool)
{
int h;
switch(system)
switch (system)
{
case VIDEO_STD_NTSC:
case VIDEO_STD_480P:
@@ -211,7 +213,7 @@ int cVideo::SetVideoSystem(int system, bool)
break;
case VIDEO_STD_AUTO:
hal_info("%s: VIDEO_STD_AUTO not implemented\n", __func__);
// fallthrough
// fallthrough
case VIDEO_STD_SECAM:
case VIDEO_STD_PAL:
case VIDEO_STD_576P:
@@ -261,17 +263,21 @@ bool cVideo::ShowPicture(const char *fname)
AVFrame *frame, *rgbframe;
AVPacket avpkt;
if (avformat_open_input(&avfc, fname, NULL, NULL) < 0) {
if (avformat_open_input(&avfc, fname, NULL, NULL) < 0)
{
hal_info("%s: Could not open file %s\n", __func__, fname);
return ret;
}
if (avformat_find_stream_info(avfc, NULL) < 0) {
if (avformat_find_stream_info(avfc, NULL) < 0)
{
hal_info("%s: Could not find file info %s\n", __func__, fname);
goto out_close;
}
for (i = 0; i < avfc->nb_streams; i++) {
if (avfc->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
for (i = 0; i < avfc->nb_streams; i++)
{
if (avfc->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
stream_id = i;
break;
}
@@ -281,24 +287,28 @@ bool cVideo::ShowPicture(const char *fname)
p = avfc->streams[stream_id]->codecpar;
codec = avcodec_find_decoder(p->codec_id);
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
if (avcodec_open2(c, codec, NULL) < 0)
{
hal_info("%s: Could not find/open the codec, id 0x%x\n", __func__, p->codec_id);
goto out_close;
}
frame = av_frame_alloc();
rgbframe = av_frame_alloc();
if (!frame || !rgbframe) {
if (!frame || !rgbframe)
{
hal_info("%s: Could not allocate video frame\n", __func__);
goto out_free;
}
av_init_packet(&avpkt);
if (av_read_frame(avfc, &avpkt) < 0) {
if (av_read_frame(avfc, &avpkt) < 0)
{
hal_info("%s: av_read_frame < 0\n", __func__);
goto out_free;
}
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(57,37,100)
av_ret = avcodec_decode_video2(c, frame, &got_frame, &avpkt);
if (av_ret < 0) {
if (av_ret < 0)
{
hal_info("%s: avcodec_decode_video2 %d\n", __func__, av_ret);
av_packet_unref(&avpkt);
goto out_free;
@@ -307,7 +317,8 @@ bool cVideo::ShowPicture(const char *fname)
hal_info("%s: WARN: pkt->size %d != len %d\n", __func__, avpkt.size, av_ret);
#else
av_ret = avcodec_send_packet(c, &avpkt);
if (av_ret != 0 && av_ret != AVERROR(EAGAIN)) {
if (av_ret != 0 && av_ret != AVERROR(EAGAIN))
{
hal_info("%s: avcodec_send_packet %d\n", __func__, av_ret);
av_packet_unref(&avpkt);
goto out_free;
@@ -319,22 +330,24 @@ bool cVideo::ShowPicture(const char *fname)
got_frame = 1;
#endif
if (got_frame) {
if (got_frame)
{
unsigned int need = av_image_get_buffer_size(VDEC_PIXFMT, c->width, c->height, 1);
struct SwsContext *convert = sws_getContext(c->width, c->height, c->pix_fmt,
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
if (!convert)
hal_info("%s: ERROR setting up SWS context\n", __func__);
else {
else
{
buf_m.lock();
SWFramebuffer *f = &buffers[buf_in];
if (f->size() < need)
f->resize(need);
av_image_fill_arrays(rgbframe->data, rgbframe->linesize, &(*f)[0], VDEC_PIXFMT,
c->width, c->height, 1);
c->width, c->height, 1);
sws_scale(convert, frame->data, frame->linesize, 0, c->height,
rgbframe->data, rgbframe->linesize);
rgbframe->data, rgbframe->linesize);
sws_freeContext(convert);
f->width(c->width);
f->height(c->height);
@@ -344,7 +357,8 @@ bool cVideo::ShowPicture(const char *fname)
buf_in++;
buf_in %= VDEC_MAXBUFS;
buf_num++;
if (buf_num > (VDEC_MAXBUFS - 1)) {
if (buf_num > (VDEC_MAXBUFS - 1))
{
hal_debug("%s: buf_num overflow\n", __func__);
buf_out++;
buf_out %= VDEC_MAXBUFS;
@@ -355,12 +369,12 @@ bool cVideo::ShowPicture(const char *fname)
}
}
av_packet_unref(&avpkt);
out_free:
out_free:
avcodec_close(c);
av_free(c);
av_frame_free(&frame);
av_frame_free(&rgbframe);
out_close:
out_close:
avformat_close_input(&avfc);
hal_debug("%s(%s) end\n", __func__, fname);
return ret;
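For newer libavcodec (the LIBAVCODEC_VERSION_INT >= 57.37.100 branch above) decoding goes through the send/receive API instead of avcodec_decode_video2(); the pattern in isolation, as a sketch:
#include <libavcodec/avcodec.h>
/* feed one packet to the decoder; returns 1 if a frame was produced,
 * 0 if the decoder needs more input, <0 on error */
static int decode_one(AVCodecContext *c, AVPacket *pkt, AVFrame *frame)
{
	int ret = avcodec_send_packet(c, pkt);
	if (ret < 0 && ret != AVERROR(EAGAIN))
		return ret;
	ret = avcodec_receive_frame(c, frame);
	if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
		return 0;
	return (ret < 0) ? ret : 1;
}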
@@ -396,7 +410,8 @@ void cVideo::getPictureInfo(int &width, int &height, int &rate)
{
width = dec_w;
height = dec_h;
switch (dec_r) {
switch (dec_r)
{
case 23://23.976fps
rate = VIDEO_FRAME_RATE_23_976;
break;
@@ -437,7 +452,8 @@ int cVideo::SetStreamType(VIDEO_FORMAT v)
cVideo::SWFramebuffer *cVideo::getDecBuf(void)
{
buf_m.lock();
if (buf_num == 0) {
if (buf_num == 0)
{
buf_m.unlock();
return NULL;
}
@@ -452,8 +468,10 @@ cVideo::SWFramebuffer *cVideo::getDecBuf(void)
static int my_read(void *, uint8_t *buf, int buf_size)
{
int tmp = 0;
if (videoDecoder && bufpos < DMX_BUF_SZ - 4096) {
while (bufpos < buf_size && ++tmp < 20) { /* retry max 20 times */
if (videoDecoder && bufpos < DMX_BUF_SZ - 4096)
{
while (bufpos < buf_size && ++tmp < 20) /* retry max 20 times */
{
int ret = videoDemux->Read(dmxbuf + bufpos, DMX_BUF_SZ - bufpos, 20);
if (ret > 0)
bufpos += ret;
@@ -461,7 +479,8 @@ static int my_read(void *, uint8_t *buf, int buf_size)
}
if (bufpos == 0)
return 0;
if (bufpos > buf_size) {
if (bufpos > buf_size)
{
memcpy(buf, dmxbuf, buf_size);
memmove(dmxbuf, dmxbuf + buf_size, bufpos - buf_size);
bufpos -= buf_size;
@@ -478,7 +497,7 @@ void cVideo::run(void)
hal_info("====================== start decoder thread ================================\n");
AVCodec *codec;
AVCodecParameters *p = NULL;
AVCodecContext *c= NULL;
AVCodecContext *c = NULL;
AVFormatContext *avfc = NULL;
AVInputFormat *inp;
AVFrame *frame, *rgbframe;
@@ -499,18 +518,19 @@ void cVideo::run(void)
av_init_packet(&avpkt);
inp = av_find_input_format("mpegts");
AVIOContext *pIOCtx = avio_alloc_context(inbuf, INBUF_SIZE, // internal Buffer and its size
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
my_read, // read callback
NULL, // write callback
NULL); // seek callback
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
my_read, // read callback
NULL, // write callback
NULL); // seek callback
avfc = avformat_alloc_context();
avfc->pb = pIOCtx;
avfc->iformat = inp;
avfc->probesize = 188*5;
avfc->probesize = 188 * 5;
thread_running = true;
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0) {
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0)
{
hal_info("%s: Could not open input\n", __func__);
goto out;
}
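The demux-to-libavformat glue above is the usual custom-I/O pattern: a read callback pulls transport stream data from the hardware demux, avio_alloc_context() wraps it, and avformat_open_input() probes the result. Stripped down (hypothetical my_source_read(); buffer size and error handling are assumptions):
#include <libavformat/avformat.h>
#include <libavutil/mem.h>
static int my_source_read(void *opaque, uint8_t *buf, int buf_size)
{
	/* placeholder: a real callback copies demux data into buf and returns
	 * the number of bytes read, or AVERROR_EOF */
	(void)opaque; (void)buf; (void)buf_size;
	return AVERROR_EOF;
}
static AVFormatContext *open_custom_ts(void *opaque)
{
	const int IOBUF = 64 * 1024;
	unsigned char *iobuf = (unsigned char *)av_malloc(IOBUF);
	AVIOContext *pio = avio_alloc_context(iobuf, IOBUF,
			0,               /* not writable */
			opaque,          /* handed to the callback */
			my_source_read,  /* read callback */
			NULL, NULL);     /* no write, no seek */
	AVFormatContext *fc = avformat_alloc_context();
	fc->pb = pio;
	fc->iformat = av_find_input_format("mpegts");
	fc->probesize = 188 * 5; /* a handful of TS packets is enough to probe */
	if (avformat_open_input(&fc, NULL, fc->iformat, NULL) < 0)
		return NULL; /* sketch only: leaks pio and iobuf on error */
	return fc;
}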
@@ -529,25 +549,31 @@ void cVideo::run(void)
hal_info("%s: no video codec? 0x%x\n", __func__, p->codec_type);
codec = avcodec_find_decoder(p->codec_id);
if (!codec) {
if (!codec)
{
hal_info("%s: Codec for %s not found\n", __func__, avcodec_get_name(p->codec_id));
goto out;
}
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
if (avcodec_open2(c, codec, NULL) < 0)
{
hal_info("%s: Could not open codec\n", __func__);
goto out;
}
frame = av_frame_alloc();
rgbframe = av_frame_alloc();
if (!frame || !rgbframe) {
if (!frame || !rgbframe)
{
hal_info("%s: Could not allocate video frame\n", __func__);
goto out2;
}
hal_info("decoding %s\n", avcodec_get_name(c->codec_id));
while (thread_running) {
if (av_read_frame(avfc, &avpkt) < 0) {
if (warn_r - time(NULL) > 4) {
while (thread_running)
{
if (av_read_frame(avfc, &avpkt) < 0)
{
if (warn_r - time(NULL) > 4)
{
hal_info("%s: av_read_frame < 0\n", __func__);
warn_r = time(NULL);
}
@@ -557,8 +583,10 @@ void cVideo::run(void)
int got_frame = 0;
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(57,37,100)
av_ret = avcodec_decode_video2(c, frame, &got_frame, &avpkt);
if (av_ret < 0) {
if (warn_d - time(NULL) > 4) {
if (av_ret < 0)
{
if (warn_d - time(NULL) > 4)
{
hal_info("%s: avcodec_decode_video2 %d\n", __func__, av_ret);
warn_d = time(NULL);
}
@@ -569,8 +597,10 @@ void cVideo::run(void)
hal_info("%s: WARN: pkt->size %d != len %d\n", __func__, avpkt.size, av_ret);
#else
av_ret = avcodec_send_packet(c, &avpkt);
if (av_ret != 0 && av_ret != AVERROR(EAGAIN)) {
if (warn_d - time(NULL) > 4) {
if (av_ret != 0 && av_ret != AVERROR(EAGAIN))
{
if (warn_d - time(NULL) > 4)
{
hal_info("%s: avcodec_send_packet %d\n", __func__, av_ret);
warn_d = time(NULL);
}
@@ -582,26 +612,29 @@ void cVideo::run(void)
got_frame = 1;
#endif
still_m.lock();
if (got_frame && ! stillpicture) {
if (got_frame && ! stillpicture)
{
unsigned int need = av_image_get_buffer_size(VDEC_PIXFMT, c->width, c->height, 1);
convert = sws_getCachedContext(convert,
c->width, c->height, c->pix_fmt,
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
c->width, c->height, c->pix_fmt,
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
if (!convert)
hal_info("%s: ERROR setting up SWS context\n", __func__);
else {
else
{
buf_m.lock();
SWFramebuffer *f = &buffers[buf_in];
if (f->size() < need)
f->resize(need);
av_image_fill_arrays(rgbframe->data, rgbframe->linesize, &(*f)[0], VDEC_PIXFMT,
c->width, c->height, 1);
c->width, c->height, 1);
sws_scale(convert, frame->data, frame->linesize, 0, c->height,
rgbframe->data, rgbframe->linesize);
if (dec_w != c->width || dec_h != c->height) {
rgbframe->data, rgbframe->linesize);
if (dec_w != c->width || dec_h != c->height)
{
hal_info("%s: pic changed %dx%d -> %dx%d\n", __func__,
dec_w, dec_h, c->width, c->height);
dec_w, dec_h, c->width, c->height);
dec_w = c->width;
dec_h = c->height;
w_h_changed = true;
@@ -616,14 +649,14 @@ void cVideo::run(void)
/* a/v delay determined experimentally :-) */
#if USE_OPENGL
if (v_format == VIDEO_FORMAT_MPEG2)
vpts += 90000*4/10; /* 400ms */
vpts += 90000 * 4 / 10; /* 400ms */
else
vpts += 90000*3/10; /* 300ms */
vpts += 90000 * 3 / 10; /* 300ms */
#endif
#if USE_CLUTTER
/* no idea why there's a difference between OpenGL and clutter rendering... */
if (v_format == VIDEO_FORMAT_MPEG2)
vpts += 90000*3/10; /* 300ms */
vpts += 90000 * 3 / 10; /* 300ms */
#endif
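/* note: vpts is in ticks of the 90 kHz MPEG system clock (90 ticks per
 * millisecond), so 90000 * 4 / 10 = 36000 ticks = 400 ms and
 * 90000 * 3 / 10 = 27000 ticks = 300 ms */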
f->pts(vpts);
AVRational a = av_guess_sample_aspect_ratio(avfc, avfc->streams[0], frame);
@@ -631,41 +664,44 @@ void cVideo::run(void)
buf_in++;
buf_in %= VDEC_MAXBUFS;
buf_num++;
if (buf_num > (VDEC_MAXBUFS - 1)) {
if (buf_num > (VDEC_MAXBUFS - 1))
{
hal_debug("%s: buf_num overflow\n", __func__);
buf_out++;
buf_out %= VDEC_MAXBUFS;
buf_num--;
}
dec_r = c->time_base.den/(c->time_base.num * c->ticks_per_frame);
dec_r = c->time_base.den / (c->time_base.num * c->ticks_per_frame);
buf_m.unlock();
}
hal_debug("%s: time_base: %d/%d, ticks: %d rate: %d pts 0x%" PRIx64 "\n", __func__,
c->time_base.num, c->time_base.den, c->ticks_per_frame, dec_r,
c->time_base.num, c->time_base.den, c->ticks_per_frame, dec_r,
#if (LIBAVUTIL_VERSION_MAJOR < 54)
av_frame_get_best_effort_timestamp(frame));
av_frame_get_best_effort_timestamp(frame));
#else
frame->best_effort_timestamp);
frame->best_effort_timestamp);
#endif
} else
}
else
hal_debug("%s: got_frame: %d stillpicture: %d\n", __func__, got_frame, stillpicture);
still_m.unlock();
av_packet_unref(&avpkt);
}
sws_freeContext(convert);
out2:
out2:
avcodec_close(c);
av_free(c);
av_frame_free(&frame);
av_frame_free(&rgbframe);
out:
out:
avformat_close_input(&avfc);
av_free(pIOCtx->buffer);
av_free(pIOCtx);
/* reset output buffers */
bufpos = 0;
still_m.lock();
if (!stillpicture) {
if (!stillpicture)
{
buf_num = 0;
buf_in = 0;
buf_out = 0;
@@ -680,50 +716,57 @@ static bool swscale(unsigned char *src, unsigned char *dst, int sw, int sh, int
int len = 0;
struct SwsContext *scale = NULL;
scale = sws_getCachedContext(scale, sw, sh, sfmt, dw, dh, AV_PIX_FMT_RGB32, SWS_BICUBIC, 0, 0, 0);
if (!scale) {
if (!scale)
{
hal_info_c("%s: ERROR setting up SWS context\n", __func__);
return ret;
}
AVFrame *sframe = av_frame_alloc();
AVFrame *dframe = av_frame_alloc();
if (sframe && dframe) {
if (sframe && dframe)
{
len = av_image_fill_arrays(sframe->data, sframe->linesize, &(src)[0], sfmt, sw, sh, 1);
if(len>-1)
if (len > -1)
ret = true;
if(ret && (len = av_image_fill_arrays(dframe->data, dframe->linesize, &(dst)[0], AV_PIX_FMT_RGB32, dw, dh, 1)<0))
if (ret && (len = av_image_fill_arrays(dframe->data, dframe->linesize, &(dst)[0], AV_PIX_FMT_RGB32, dw, dh, 1) < 0))
ret = false;
if(ret && (len = sws_scale(scale, sframe->data, sframe->linesize, 0, sh, dframe->data, dframe->linesize)<0))
if (ret && (len = sws_scale(scale, sframe->data, sframe->linesize, 0, sh, dframe->data, dframe->linesize) < 0))
ret = false;
else
ret = true;
}else{
}
else
{
hal_info_c("%s: could not alloc sframe (%p) or dframe (%p)\n", __func__, sframe, dframe);
ret = false;
}
if(sframe){
if (sframe)
{
av_frame_free(&sframe);
sframe = NULL;
}
if(dframe){
if (dframe)
{
av_frame_free(&dframe);
dframe = NULL;
}
if(scale){
if (scale)
{
sws_freeContext(scale);
scale = NULL;
}
hal_info_c("%s: %s scale %ix%i to %ix%i ,len %i\n",ret?" ":"ERROR",__func__, sw, sh, dw, dh,len);
hal_info_c("%s: %s scale %ix%i to %ix%i ,len %i\n", ret ? " " : "ERROR", __func__, sw, sh, dw, dh, len);
return ret;
}
bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video, bool get_osd, bool scale_to_video)
bool cVideo::GetScreenImage(unsigned char *&data, int &xres, int &yres, bool get_video, bool get_osd, bool scale_to_video)
{
hal_info("%s: data 0x%p xres %d yres %d vid %d osd %d scale %d\n",
__func__, data, xres, yres, get_video, get_osd, scale_to_video);
__func__, data, xres, yres, get_video, get_osd, scale_to_video);
SWFramebuffer video;
std::vector<unsigned char> *osd = NULL;
std::vector<unsigned char> s_osd; /* scaled OSD */
@@ -732,13 +775,15 @@ bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool ge
int osd_h = glfb_priv->getOSDHeight();
xres = osd_w;
yres = osd_h;
if (get_video) {
if (get_video)
{
buf_m.lock();
video = buffers[buf_out];
buf_m.unlock();
vid_w = video.width();
vid_h = video.height();
if (scale_to_video || !get_osd) {
if (scale_to_video || !get_osd)
{
xres = vid_w;
yres = vid_h;
AVRational a = video.AR();
@@ -747,8 +792,9 @@ bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool ge
xres = vid_w * a.num / a.den;
}
}
if(video.empty()){
get_video=false;
if (video.empty())
{
get_video = false;
xres = osd_w;
yres = osd_h;
}
@@ -756,53 +802,66 @@ bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool ge
osd = glfb_priv->getOSDBuffer();
unsigned int need = av_image_get_buffer_size(AV_PIX_FMT_RGB32, xres, yres, 1);
data = (unsigned char *)realloc(data, need); /* will be freed by caller */
if (data == NULL) /* out of memory? */
if (data == NULL) /* out of memory? */
return false;
if (get_video) {
if (get_video)
{
#if USE_OPENGL //memcpy dont work with copy BGR24 to RGB32
if (vid_w != xres || vid_h != yres){ /* scale video into data... */
if (vid_w != xres || vid_h != yres) /* scale video into data... */
{
#endif
bool ret = swscale(&video[0], data, vid_w, vid_h, xres, yres,VDEC_PIXFMT);
if(!ret){
bool ret = swscale(&video[0], data, vid_w, vid_h, xres, yres, VDEC_PIXFMT);
if (!ret)
{
free(data);
return false;
}
#if USE_OPENGL //memcpy dont work with copy BGR24 to RGB32
}else{ /* get_video and no fancy scaling needed */
}
else /* get_video and no fancy scaling needed */
{
memcpy(data, &video[0], xres * yres * sizeof(uint32_t));
}
#endif
}
if (get_osd && (osd_w != xres || osd_h != yres)) {
if (get_osd && (osd_w != xres || osd_h != yres))
{
/* rescale osd */
s_osd.resize(need);
bool ret = swscale(&(*osd)[0], &s_osd[0], osd_w, osd_h, xres, yres,AV_PIX_FMT_RGB32);
if(!ret){
bool ret = swscale(&(*osd)[0], &s_osd[0], osd_w, osd_h, xres, yres, AV_PIX_FMT_RGB32);
if (!ret)
{
free(data);
return false;
}
osd = &s_osd;
}
if (get_video && get_osd) {
if (get_video && get_osd)
{
/* alpha blend osd onto data (video). TODO: maybe libavcodec can do this? */
uint32_t *d = (uint32_t *)data;
uint32_t *pixpos = (uint32_t *)&(*osd)[0];
for (int count = 0; count < yres; count++) {
for (int count2 = 0; count2 < xres; count2++ ) {
uint32_t *pixpos = (uint32_t *) & (*osd)[0];
for (int count = 0; count < yres; count++)
{
for (int count2 = 0; count2 < xres; count2++)
{
uint32_t pix = *pixpos;
if ((pix & 0xff000000) == 0xff000000)
*d = pix;
else {
else
{
uint8_t *in = (uint8_t *)(pixpos);
uint8_t *out = (uint8_t *)d;
int a = in[3]; /* TODO: big/little endian? */
int a = in[3]; /* TODO: big/little endian? */
*out = (*out + ((*in - *out) * a) / 256);
in++; out++;
in++;
out++;
*out = (*out + ((*in - *out) * a) / 256);
in++; out++;
in++;
out++;
*out = (*out + ((*in - *out) * a) / 256);
}
d++;
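The per-channel operation above is plain integer alpha compositing; written as a standalone helper (sketch, same 0..255 alpha and divide-by-256 convention as the loop above):
#include <stdint.h>
/* blend one 32 bit OSD pixel over one video pixel, channel by channel;
 * alpha is taken from byte 3 of the source pixel (endianness caveat as in
 * the TODO above) */
static void blend_pixel(uint8_t *dst, const uint8_t *src)
{
	int a = src[3];
	for (int i = 0; i < 3; i++)
		dst[i] = (uint8_t)(dst[i] + ((src[i] - dst[i]) * a) / 256);
}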

View File

@@ -11,7 +11,8 @@ extern "C" {
#include <libavutil/rational.h>
}
typedef enum {
typedef enum
{
ANALOG_SD_RGB_CINCH = 0x00,
ANALOG_SD_YPRPB_CINCH,
ANALOG_HD_RGB_CINCH,
@@ -23,7 +24,8 @@ typedef enum {
ANALOG_SCART_MASK = 0x10
} analog_mode_t;
typedef enum {
typedef enum
{
VIDEO_FORMAT_MPEG2 = 0,
VIDEO_FORMAT_MPEG4_H264,
VIDEO_FORMAT_VC1,
@@ -34,7 +36,8 @@ typedef enum {
VIDEO_FORMAT_AVS = 16
} VIDEO_FORMAT;
typedef enum {
typedef enum
{
VIDEO_SD = 0,
VIDEO_HD,
VIDEO_120x60i,
@@ -43,7 +46,8 @@ typedef enum {
VIDEO_360x288i
} VIDEO_DEFINITION;
typedef enum {
typedef enum
{
VIDEO_FRAME_RATE_23_976 = 0,
VIDEO_FRAME_RATE_24,
VIDEO_FRAME_RATE_25,
@@ -54,7 +58,8 @@ typedef enum {
VIDEO_FRAME_RATE_60
} VIDEO_FRAME_RATE;
typedef enum {
typedef enum
{
DISPLAY_AR_1_1,
DISPLAY_AR_4_3,
DISPLAY_AR_14_9,
@@ -63,20 +68,23 @@ typedef enum {
DISPLAY_AR_RAW
} DISPLAY_AR;
typedef enum {
typedef enum
{
DISPLAY_AR_MODE_PANSCAN = 0,
DISPLAY_AR_MODE_LETTERBOX,
DISPLAY_AR_MODE_NONE,
DISPLAY_AR_MODE_PANSCAN2
} DISPLAY_AR_MODE;
typedef enum {
typedef enum
{
VIDEO_DB_DR_NEITHER = 0,
VIDEO_DB_ON,
VIDEO_DB_DR_BOTH
} VIDEO_DB_DR;
typedef enum {
typedef enum
{
VIDEO_PLAY_STILL = 0,
VIDEO_PLAY_CLIP,
VIDEO_PLAY_TRICK,
@@ -84,7 +92,8 @@ typedef enum {
VIDEO_PLAY_MOTION_NO_SYNC
} VIDEO_PLAY_MODE;
typedef enum {
typedef enum
{
VIDEO_STD_NTSC,
VIDEO_STD_SECAM,
VIDEO_STD_PAL,
@@ -98,7 +107,7 @@ typedef enum {
VIDEO_STD_1080P24,
VIDEO_STD_1080P25,
VIDEO_STD_AUTO,
VIDEO_STD_1080P50, /* SPARK only */
VIDEO_STD_1080P50, /* SPARK only */
VIDEO_STD_1080P60,
VIDEO_STD_1080P2397,
VIDEO_STD_1080P2997,
@@ -110,13 +119,15 @@ typedef enum {
} VIDEO_STD;
/* not used, for dummy functions */
typedef enum {
typedef enum
{
VIDEO_HDMI_CEC_MODE_OFF = 0,
VIDEO_HDMI_CEC_MODE_TUNER,
VIDEO_HDMI_CEC_MODE_RECORDER
} VIDEO_HDMI_CEC_MODE;
typedef enum {
typedef enum
{
VIDEO_HDMI_CEC_VOL_OFF = 0,
VIDEO_HDMI_CEC_VOL_AUDIOSYSTEM = 1,
VIDEO_HDMI_CEC_VOL_TV = 2
@@ -136,27 +147,51 @@ typedef enum
#define VDEC_MAXBUFS 0x40
class cVideo : public OpenThreads::Thread
{
friend class GLFbPC;
friend class cDemux;
friend class GLFbPC;
friend class cDemux;
private:
/* called from GL thread */
class SWFramebuffer : public std::vector<unsigned char>
{
public:
SWFramebuffer() : mWidth(0), mHeight(0) {}
void width(int w) { mWidth = w; }
void height(int h) { mHeight = h; }
void pts(uint64_t p) { mPts = p; }
void AR(AVRational a) { mAR = a; }
int width() const { return mWidth; }
int height() const { return mHeight; }
int64_t pts() const { return mPts; }
AVRational AR() const { return mAR; }
private:
int mWidth;
int mHeight;
int64_t mPts;
AVRational mAR;
public:
SWFramebuffer() : mWidth(0), mHeight(0) {}
void width(int w)
{
mWidth = w;
}
void height(int h)
{
mHeight = h;
}
void pts(uint64_t p)
{
mPts = p;
}
void AR(AVRational a)
{
mAR = a;
}
int width() const
{
return mWidth;
}
int height() const
{
return mHeight;
}
int64_t pts() const
{
return mPts;
}
AVRational AR() const
{
return mAR;
}
private:
int mWidth;
int mHeight;
int64_t mPts;
AVRational mAR;
};
int buf_in, buf_out, buf_num;
int64_t GetPTS(void);
@@ -165,8 +200,14 @@ class cVideo : public OpenThreads::Thread
cVideo(int mode, void *, void *, unsigned int unit = 0);
~cVideo(void);
void * GetTVEnc() { return NULL; };
void * GetTVEncSD() { return NULL; };
void *GetTVEnc()
{
return NULL;
};
void *GetTVEncSD()
{
return NULL;
};
/* aspect ratio */
int getAspectRatio(void);
@@ -193,29 +234,68 @@ class cVideo : public OpenThreads::Thread
bool Pause(void);
int SetStreamType(VIDEO_FORMAT type);
bool ShowPicture(const char * fname);
bool ShowPicture(const char *fname);
void SetSyncMode(AVSYNC_TYPE mode);
bool SetCECMode(VIDEO_HDMI_CEC_MODE) { return true; };
void SetCECAutoView(bool) { return; };
void SetCECAutoStandby(bool) { return; };
int GetAudioDestination() { return 0; };
void SetAudioDestination(int /*audio_dest*/) { return; };
bool SetCECMode(VIDEO_HDMI_CEC_MODE)
{
return true;
};
void SetCECAutoView(bool)
{
return;
};
void SetCECAutoStandby(bool)
{
return;
};
int GetAudioDestination()
{
return 0;
};
void SetAudioDestination(int audio_dest)
{
return;
};
void StopPicture();
void Standby(unsigned int bOn);
void Pig(int x, int y, int w, int h, int osd_w = 1064, int osd_h = 600, int startx = 0, int starty = 0, int endx = 1279, int endy = 719);
void SetControl(int, int) { return; };
void SetControl(int, int)
{
return;
};
void setContrast(int val);
void SetVideoMode(analog_mode_t mode);
void SetDBDR(int) { return; };
void SetAudioHandle(void *) { return; };
void SetAutoModes(int [VIDEO_STD_MAX]) { return; };
int OpenVBI(int) { return 0; };
int CloseVBI(void) { return 0; };
int StartVBI(unsigned short) { return 0; };
int StopVBI(void) { return 0; };
void SetDBDR(int)
{
return;
};
void SetAudioHandle(void *)
{
return;
};
void SetAutoModes(int [VIDEO_STD_MAX])
{
return;
};
int OpenVBI(int)
{
return 0;
};
int CloseVBI(void)
{
return 0;
};
int StartVBI(unsigned short)
{
return 0;
};
int StopVBI(void)
{
return 0;
};
void SetDemux(cDemux *dmx);
bool GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video = true, bool get_osd = false, bool scale_to_video = false);
bool GetScreenImage(unsigned char *&data, int &xres, int &yres, bool get_video = true, bool get_osd = false, bool scale_to_video = false);
SWFramebuffer *getDecBuf(void);
private:
void run();