- move generic-pc code to libgeneric-pc directory

Signed-off-by: Thilo Graf <dbt@novatux.de>
Author:    svenhoefer
Date:      2018-12-23 22:33:56 +01:00
Committer: Thilo Graf
Parent:    aba7114188
Commit:    635ec1617b
20 changed files with 2 additions and 2 deletions

libgeneric-pc/Makefile.am (new file, 60 lines)

@@ -0,0 +1,60 @@
noinst_LTLIBRARIES = libgeneric.la
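# libgeneric.la is a noinst convenience library: it is not installed on its
# own but presumably linked into the top-level libstb-hal library by the
# parent Makefile.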
AM_CPPFLAGS = -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
AM_CPPFLAGS += -Wfatal-errors
AM_CPPFLAGS += \
-I$(top_srcdir)/common \
-I$(top_srcdir)/include \
@AVUTIL_CFLAGS@ \
@CLUTTER_CFLAGS@
AM_CXXFLAGS = -fno-rtti -fno-exceptions -fno-strict-aliasing
AM_LDFLAGS = \
-lao \
-lOpenThreads \
@AVFORMAT_LIBS@ \
@AVUTIL_LIBS@ \
@AVCODEC_LIBS@ \
@SWRESAMPLE_LIBS@ \
@SWSCALE_LIBS@ \
@CLUTTER_LIBS@
if USE_OPENGL
AM_LDFLAGS += -lglut -lGL -lGLU -lGLEW -lao
endif
libgeneric_la_SOURCES = \
hardware_caps.c \
dmx.cpp \
video.cpp \
audio.cpp \
init.cpp \
record.cpp
if USE_CLUTTER
libgeneric_la_SOURCES += clutterfb.cpp
endif
if USE_OPENGL
libgeneric_la_SOURCES += glfb.cpp
endif
if ENABLE_GSTREAMER_01
libgeneric_la_SOURCES += \
playback_gst_01.cpp
AM_LDFLAGS += \
-lgstreamer-0.10 \
-lgstinterfaces-0.10
else
if ENABLE_GSTREAMER_10
libgeneric_la_SOURCES += \
playback_gst_10.cpp
AM_LDFLAGS += \
-lgstreamer-1.0 \
-lgsttag-1.0 \
-lgstmpegts-1.0
else
libgeneric_la_SOURCES += \
playback.cpp
endif
endif

libgeneric-pc/audio.cpp (new file, 493 lines)

@@ -0,0 +1,493 @@
/*
* (C) 2010-2013 Stefan Seyfried
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* cAudio implementation with decoder.
* uses libao <http://www.xiph.org/ao/> for output
* ffmpeg <http://ffmpeg.org> for demuxing / decoding / format conversion
*/
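/*
 * Rough data flow (as implemented below): the decoder thread in cAudio::run()
 * pulls TS data from the audio demux via the my_read() callback, hands it to
 * libavformat through a custom AVIOContext, decodes audio frames with
 * libavcodec, converts them to interleaved S16 with libswresample and finally
 * plays them through libao.
 */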
#include <cstdio>
#include <cstdlib>
#include "audio_lib.h"
#include "dmx_hal.h"
#include "lt_debug.h"
#define lt_debug(args...) _lt_debug(HAL_DEBUG_AUDIO, this, args)
#define lt_info(args...) _lt_info(HAL_DEBUG_AUDIO, this, args)
#include <OpenThreads/Thread>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>
#include <ao/ao.h>
}
/* ffmpeg buf 2k */
#define INBUF_SIZE 0x0800
/* my own buf 16k */
#define DMX_BUF_SZ 0x4000
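/* INBUF_SIZE is the AVIO buffer handed to ffmpeg; DMX_BUF_SZ is the larger
 * staging buffer that my_read() refills from the demux */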
cAudio * audioDecoder = NULL;
extern cDemux *audioDemux;
static uint8_t *dmxbuf = NULL;
static int bufpos;
extern bool HAL_nodec;
static cAudio *gThiz = NULL;
static ao_device *adevice = NULL;
static ao_sample_format sformat;
static AVCodecContext *c = NULL;
static AVCodecParameters *p = NULL;
cAudio::cAudio(void *, void *, void *)
{
thread_started = false;
if (!HAL_nodec)
dmxbuf = (uint8_t *)malloc(DMX_BUF_SZ);
bufpos = 0;
curr_pts = 0;
gThiz = this;
ao_initialize();
}
cAudio::~cAudio(void)
{
closeDevice();
free(dmxbuf);
if (adevice)
ao_close(adevice);
adevice = NULL;
ao_shutdown();
}
void cAudio::openDevice(void)
{
lt_debug("%s\n", __func__);
}
void cAudio::closeDevice(void)
{
lt_debug("%s\n", __func__);
}
int cAudio::do_mute(bool enable, bool remember)
{
lt_debug("%s(%d, %d)\n", __func__, enable, remember);
return 0;
}
int cAudio::setVolume(unsigned int left, unsigned int right)
{
lt_debug("%s(%d, %d)\n", __func__, left, right);
return 0;
}
int cAudio::Start(void)
{
lt_debug("%s >\n", __func__);
if (! HAL_nodec)
OpenThreads::Thread::start();
lt_debug("%s <\n", __func__);
return 0;
}
int cAudio::Stop(void)
{
lt_debug("%s >\n", __func__);
if (thread_started)
{
thread_started = false;
OpenThreads::Thread::join();
}
lt_debug("%s <\n", __func__);
return 0;
}
bool cAudio::Pause(bool /*Pcm*/)
{
return true;
};
void cAudio::SetSyncMode(AVSYNC_TYPE Mode)
{
lt_debug("%s %d\n", __func__, Mode);
};
void cAudio::SetStreamType(AUDIO_FORMAT type)
{
lt_debug("%s %d\n", __func__, type);
};
int cAudio::setChannel(int /*channel*/)
{
return 0;
};
int cAudio::PrepareClipPlay(int ch, int srate, int bits, int le)
{
lt_debug("%s ch %d srate %d bits %d le %d adevice %p\n", __func__, ch, srate, bits, le, adevice);;
int driver;
int byte_format = le ? AO_FMT_LITTLE : AO_FMT_BIG;
if (sformat.bits != bits || sformat.channels != ch || sformat.rate != srate ||
sformat.byte_format != byte_format || adevice == NULL)
{
driver = ao_default_driver_id();
sformat.bits = bits;
sformat.channels = ch;
sformat.rate = srate;
sformat.byte_format = byte_format;
sformat.matrix = 0;
if (adevice)
ao_close(adevice);
adevice = ao_open_live(driver, &sformat, NULL);
ao_info *ai = ao_driver_info(driver);
lt_info("%s: changed params ch %d srate %d bits %d le %d adevice %p\n",
__func__, ch, srate, bits, le, adevice);
lt_info("libao driver: %d name '%s' short '%s' author '%s'\n",
driver, ai->name, ai->short_name, ai->author);
}
return 0;
};
int cAudio::WriteClip(unsigned char *buffer, int size)
{
lt_debug("cAudio::%s buf 0x%p size %d\n", __func__, buffer, size);
if (!adevice) {
lt_info("%s: adevice not opened?\n", __func__);
return 0;
}
ao_play(adevice, (char *)buffer, size);
return size;
};
int cAudio::StopClip()
{
lt_debug("%s\n", __func__);
#if 0
/* don't do anything - closing / reopening ao all the time makes for long delays
* reinit on-demand (e.g. for changed parameters) instead */
if (!adevice) {
lt_info("%s: adevice not opened?\n", __func__);
return 0;
}
ao_close(adevice);
adevice = NULL;
#endif
return 0;
};
void cAudio::getAudioInfo(int &type, int &layer, int &freq, int &bitrate, int &mode)
{
type = 0;
layer = 0; /* not used */
freq = 0;
bitrate = 0; /* not used, but easy to get :-) */
mode = 0; /* default: stereo */
printf("cAudio::getAudioInfo c %p\n", c);
if (c) {
switch (c->codec_id) {
case AV_CODEC_ID_MP2:
type = AUDIO_FMT_MPEG;
break;
case AV_CODEC_ID_MP3:
type = AUDIO_FMT_MP3;
break;
case AV_CODEC_ID_AC3:
case AV_CODEC_ID_TRUEHD:
type = AUDIO_FMT_DOLBY_DIGITAL;
break;
case AV_CODEC_ID_EAC3:
type = AUDIO_FMT_DD_PLUS;
break;
case AV_CODEC_ID_AAC:
type = AUDIO_FMT_AAC;
break;
case AV_CODEC_ID_DTS:
type = AUDIO_FMT_DTS;
break;
case AV_CODEC_ID_MLP:
type = AUDIO_FMT_MLP;
break;
default:
break;
}
freq = c->sample_rate;
bitrate = c->bit_rate;
if (c->channels == 1)
mode = 3; /* for AV_CODEC_ID_MP2, only stereo / mono is detected for now */
if (c->codec_id != AV_CODEC_ID_MP2) {
switch (c->channel_layout) {
case AV_CH_LAYOUT_MONO:
mode = 1; // "C"
break;
case AV_CH_LAYOUT_STEREO:
mode = 2; // "L/R"
break;
case AV_CH_LAYOUT_2_1:
case AV_CH_LAYOUT_SURROUND:
mode = 3; // "L/C/R"
break;
case AV_CH_LAYOUT_2POINT1:
mode = 4; // "L/R/S"
break;
case AV_CH_LAYOUT_3POINT1:
mode = 5; // "L/C/R/S"
break;
case AV_CH_LAYOUT_2_2:
case AV_CH_LAYOUT_QUAD:
mode = 6; // "L/R/SL/SR"
break;
case AV_CH_LAYOUT_5POINT0:
case AV_CH_LAYOUT_5POINT1:
mode = 7; // "L/C/R/SL/SR"
break;
default:
lt_info("%s: unknown ch_layout 0x%" PRIx64 "\n",
__func__, c->channel_layout);
}
}
}
lt_debug("%s t: %d l: %d f: %d b: %d m: %d codec_id: %x\n",
__func__, type, layer, freq, bitrate, mode, c?c->codec_id:-1);
};
void cAudio::SetSRS(int /*iq_enable*/, int /*nmgr_enable*/, int /*iq_mode*/, int /*iq_level*/)
{
lt_debug("%s\n", __func__);
};
void cAudio::SetHdmiDD(bool enable)
{
lt_debug("%s %d\n", __func__, enable);
};
void cAudio::SetSpdifDD(bool enable)
{
lt_debug("%s %d\n", __func__, enable);
};
void cAudio::ScheduleMute(bool On)
{
lt_debug("%s %d\n", __func__, On);
};
void cAudio::EnableAnalogOut(bool enable)
{
lt_debug("%s %d\n", __func__, enable);
};
void cAudio::setBypassMode(bool disable)
{
lt_debug("%s %d\n", __func__, disable);
}
static int _my_read(void *, uint8_t *buf, int buf_size)
{
return gThiz->my_read(buf, buf_size);
}
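/* Refill dmxbuf from the demux (up to 20 short reads with a 10 ms timeout each),
 * then hand at most buf_size bytes to ffmpeg and keep the rest for the next call. */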
int cAudio::my_read(uint8_t *buf, int buf_size)
{
int tmp = 0;
if (audioDecoder && bufpos < DMX_BUF_SZ - 4096) {
while (bufpos < buf_size && ++tmp < 20) { /* retry max 20 times */
int ret = audioDemux->Read(dmxbuf + bufpos, DMX_BUF_SZ - bufpos, 10);
if (ret > 0)
bufpos += ret;
if (! thread_started)
break;
}
}
if (bufpos == 0)
return 0;
//lt_info("%s buf_size %d bufpos %d th %d tmp %d\n", __func__, buf_size, bufpos, thread_started, tmp);
if (bufpos > buf_size) {
memcpy(buf, dmxbuf, buf_size);
memmove(dmxbuf, dmxbuf + buf_size, bufpos - buf_size);
bufpos -= buf_size;
return buf_size;
}
memcpy(buf, dmxbuf, bufpos);
tmp = bufpos;
bufpos = 0;
return tmp;
}
void cAudio::run()
{
lt_info("====================== start decoder thread ================================\n");
/* libavcodec & friends */
av_register_all();
AVCodec *codec;
AVFormatContext *avfc = NULL;
AVInputFormat *inp;
AVFrame *frame;
uint8_t *inbuf = (uint8_t *)av_malloc(INBUF_SIZE);
AVPacket avpkt;
int ret, driver;
/* libao */
ao_info *ai;
// ao_device *adevice;
// ao_sample_format sformat;
/* resample */
SwrContext *swr = NULL;
uint8_t *obuf = NULL;
int obuf_sz = 0; /* in samples */
int obuf_sz_max = 0;
int o_ch, o_sr; /* output channels and sample rate */
uint64_t o_layout; /* output channels layout */
char tmp[64] = "unknown";
curr_pts = 0;
av_init_packet(&avpkt);
inp = av_find_input_format("mpegts");
AVIOContext *pIOCtx = avio_alloc_context(inbuf, INBUF_SIZE, // internal Buffer and its size
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
_my_read, // read callback
NULL, // write callback
NULL); // seek callback
avfc = avformat_alloc_context();
avfc->pb = pIOCtx;
avfc->iformat = inp;
avfc->probesize = 188*5;
thread_started = true;
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0) {
lt_info("%s: avformat_open_input() failed.\n", __func__);
goto out;
}
ret = avformat_find_stream_info(avfc, NULL);
lt_debug("%s: avformat_find_stream_info: %d\n", __func__, ret);
if (avfc->nb_streams != 1)
{
lt_info("%s: nb_streams: %d, should be 1!\n", __func__, avfc->nb_streams);
goto out;
}
p = avfc->streams[0]->codecpar;
if (p->codec_type != AVMEDIA_TYPE_AUDIO)
lt_info("%s: stream 0 no audio codec? 0x%x\n", __func__, p->codec_type);
codec = avcodec_find_decoder(p->codec_id);
if (!codec) {
lt_info("%s: Codec for %s not found\n", __func__, avcodec_get_name(p->codec_id));
goto out;
}
if (c)
av_free(c);
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
lt_info("%s: avcodec_open2() failed\n", __func__);
goto out;
}
frame = av_frame_alloc();
if (!frame) {
lt_info("%s: av_frame_alloc failed\n", __func__);
goto out2;
}
/* output sample rate, channels, layout could be set here if necessary */
o_ch = p->channels; /* 2 */
o_sr = p->sample_rate; /* 48000 */
o_layout = p->channel_layout; /* AV_CH_LAYOUT_STEREO */
if (sformat.channels != o_ch || sformat.rate != o_sr ||
sformat.byte_format != AO_FMT_NATIVE || sformat.bits != 16 || adevice == NULL)
{
driver = ao_default_driver_id();
sformat.bits = 16;
sformat.channels = o_ch;
sformat.rate = o_sr;
sformat.byte_format = AO_FMT_NATIVE;
sformat.matrix = 0;
if (adevice)
ao_close(adevice);
adevice = ao_open_live(driver, &sformat, NULL);
ai = ao_driver_info(driver);
lt_info("%s: changed params ch %d srate %d bits %d adevice %p\n",
__func__, o_ch, o_sr, 16, adevice);
if(ai)
lt_info("libao driver: %d name '%s' short '%s' author '%s'\n",
driver, ai->name, ai->short_name, ai->author);
}
#if 0
lt_info(" driver options:");
for (int i = 0; i < ai->option_count; ++i)
fprintf(stderr, " %s", ai->options[i]);
fprintf(stderr, "\n");
#endif
av_get_sample_fmt_string(tmp, sizeof(tmp), c->sample_fmt);
lt_info("decoding %s, sample_fmt %d (%s) sample_rate %d channels %d\n",
avcodec_get_name(p->codec_id), c->sample_fmt, tmp, p->sample_rate, p->channels);
swr = swr_alloc_set_opts(swr,
o_layout, AV_SAMPLE_FMT_S16, o_sr, /* output */
p->channel_layout, c->sample_fmt, p->sample_rate, /* input */
0, NULL);
if (! swr) {
lt_info("could not alloc resample context\n");
goto out3;
}
swr_init(swr);
while (thread_started) {
int gotframe = 0;
if (av_read_frame(avfc, &avpkt) < 0)
break;
avcodec_decode_audio4(c, frame, &gotframe, &avpkt);
if (gotframe && thread_started) {
int out_linesize;
obuf_sz = av_rescale_rnd(swr_get_delay(swr, p->sample_rate) +
frame->nb_samples, o_sr, p->sample_rate, AV_ROUND_UP);
if (obuf_sz > obuf_sz_max) {
lt_info("obuf_sz: %d old: %d\n", obuf_sz, obuf_sz_max);
av_free(obuf);
if (av_samples_alloc(&obuf, &out_linesize, o_ch,
frame->nb_samples, AV_SAMPLE_FMT_S16, 1) < 0) {
lt_info("av_samples_alloc failed\n");
av_packet_unref(&avpkt);
break; /* while (thread_started) */
}
obuf_sz_max = obuf_sz;
}
obuf_sz = swr_convert(swr, &obuf, obuf_sz,
(const uint8_t **)frame->extended_data, frame->nb_samples);
curr_pts = av_frame_get_best_effort_timestamp(frame);
lt_debug("%s: pts 0x%" PRIx64 " %3f\n", __func__, curr_pts, curr_pts/90000.0);
int o_buf_sz = av_samples_get_buffer_size(&out_linesize, o_ch,
obuf_sz, AV_SAMPLE_FMT_S16, 1);
ao_play(adevice, (char *)obuf, o_buf_sz);
}
av_packet_unref(&avpkt);
}
// ao_close(adevice); /* can take long :-( */
av_free(obuf);
swr_free(&swr);
out3:
av_frame_free(&frame);
out2:
avcodec_close(c);
av_free(c);
c = NULL;
out:
avformat_close_input(&avfc);
av_free(pIOCtx->buffer);
av_free(pIOCtx);
lt_info("======================== end decoder thread ================================\n");
}

libgeneric-pc/audio_lib.h (new file, 105 lines)

@@ -0,0 +1,105 @@
/* public header file */
#ifndef _AUDIO_LIB_H_
#define _AUDIO_LIB_H_
#include <stdint.h>
#include <OpenThreads/Thread>
#include "cs_types.h"
typedef enum
{
AUDIO_SYNC_WITH_PTS,
AUDIO_NO_SYNC,
AUDIO_SYNC_AUDIO_MASTER
} AUDIO_SYNC_MODE;
typedef enum {
HDMI_ENCODED_OFF,
HDMI_ENCODED_AUTO,
HDMI_ENCODED_FORCED
} HDMI_ENCODED_MODE;
typedef enum
{
AUDIO_FMT_AUTO = 0,
AUDIO_FMT_MPEG,
AUDIO_FMT_MP3,
AUDIO_FMT_DOLBY_DIGITAL,
AUDIO_FMT_BASIC = AUDIO_FMT_DOLBY_DIGITAL,
AUDIO_FMT_AAC,
AUDIO_FMT_AAC_PLUS,
AUDIO_FMT_DD_PLUS,
AUDIO_FMT_DTS,
AUDIO_FMT_AVS,
AUDIO_FMT_MLP,
AUDIO_FMT_WMA,
AUDIO_FMT_MPG1, // TD only. For Movieplayer / cPlayback
AUDIO_FMT_ADVANCED = AUDIO_FMT_MLP
} AUDIO_FORMAT;
class cAudio : public OpenThreads::Thread
{
friend class cPlayback;
private:
int fd;
bool Muted;
int clipfd; /* for pcm playback */
int mixer_fd; /* if we are using the OSS mixer */
int mixer_num; /* oss mixer to use, if any */
AUDIO_FORMAT StreamType;
AUDIO_SYNC_MODE SyncMode;
bool started;
bool thread_started;
int volume;
int64_t curr_pts;
void openDevice(void);
void closeDevice(void);
int do_mute(bool enable, bool remember);
void setBypassMode(bool disable);
void run();
public:
/* construct & destruct */
cAudio(void *, void *, void *);
~cAudio(void);
int64_t getPts() { return curr_pts; }
void *GetHandle() { return NULL; };
/* shut up */
int mute(bool remember = true) { return do_mute(true, remember); };
int unmute(bool remember = true) { return do_mute(false, remember); };
/* volume, min = 0, max = 255 */
int setVolume(unsigned int left, unsigned int right);
int getVolume(void) { return volume;}
bool getMuteStatus(void) { return Muted; };
/* start and stop audio */
int Start(void);
int Stop(void);
bool Pause(bool Pcm = true);
void SetStreamType(AUDIO_FORMAT type);
void SetSyncMode(AVSYNC_TYPE Mode);
/* select channels */
int setChannel(int channel);
int PrepareClipPlay(int uNoOfChannels, int uSampleRate, int uBitsPerSample, int bLittleEndian);
int WriteClip(unsigned char * buffer, int size);
int StopClip();
void getAudioInfo(int &type, int &layer, int& freq, int &bitrate, int &mode);
void SetSRS(int iq_enable, int nmgr_enable, int iq_mode, int iq_level);
bool IsHdmiDDSupported();
void SetHdmiDD(bool enable);
void SetSpdifDD(bool enable);
void ScheduleMute(bool On);
void EnableAnalogOut(bool enable);
int my_read(uint8_t *buf, int buf_size);
};
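/* Minimal usage sketch (illustrative only, not taken from calling code):
 *
 *   cAudio *a = new cAudio(NULL, NULL, NULL);
 *   a->PrepareClipPlay(2, 48000, 16, 1);   // stereo, 48 kHz, 16 bit, little endian
 *   a->WriteClip(pcm_buffer, pcm_len);     // raw PCM is pushed straight to libao
 *   a->StopClip();
 *
 * For live TS audio, Start()/Stop() control the internal decoder thread instead.
 */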
#endif

libgeneric-pc/clutterfb.cpp (new file, 481 lines)

@@ -0,0 +1,481 @@
/*
Framebuffer implementation using clutter https://developer.gnome.org/clutter/
Copyright (C) 2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
based on the openGL framebuffer implementation
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
TODO: AV-Sync code is "experimental" at best
*/
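/*
 * Structure (as implemented below): the GL thread owns a clutter stage with two
 * actors, vid_actor for decoded video underneath and fb_actor for the OSD on top.
 * render() is driven by a repeating ClutterTimeline whose delay (sleep_us) is
 * adjusted on the fly as a crude A/V sync, see bltDisplayBuffer().
 */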
#include "config.h"
#include <vector>
#include <sys/types.h>
#include <signal.h>
#include <cstdio>
#include <cstring>
#include <errno.h>
#include <inttypes.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <linux/input.h>
#include "glfb_priv.h"
#include "video_priv.h"
#include "audio_priv.h"
#include <clutter/x11/clutter-x11.h>
#include "lt_debug.h"
#define lt_debug_c(args...) _lt_debug(HAL_DEBUG_INIT, NULL, args)
#define lt_info_c(args...) _lt_info(HAL_DEBUG_INIT, NULL, args)
#define lt_debug(args...) _lt_debug(HAL_DEBUG_INIT, this, args)
#define lt_info(args...) _lt_info(HAL_DEBUG_INIT, this, args)
extern VDec *vdec;
extern ADec *adec;
/* the private class that does stuff only needed inside libstb-hal.
* is used e.g. by cVideo... */
GLFbPC *glfb_priv = NULL;
GLFramebuffer::GLFramebuffer(int x, int y)
{
Init();
glfb_priv = new GLFbPC(x, y, osd_buf);
si = glfb_priv->getScreenInfo();
start();
while (!glfb_priv->mInitDone)
usleep(1);
}
GLFramebuffer::~GLFramebuffer()
{
glfb_priv->mShutDown = true;
join();
delete glfb_priv;
glfb_priv = NULL;
}
void GLFramebuffer::blit()
{
glfb_priv->blit();
}
GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mShutDown(false), mInitDone(false)
{
osd_buf = &buf;
mState.width = x;
mState.height = y;
mX = &_mX[0];
mY = &_mY[0];
*mX = x;
*mY = y;
av_reduce(&mOA.num, &mOA.den, x, y, INT_MAX);
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVAchanged = true;
mCrop = DISPLAY_AR_MODE_PANSCAN;
zoom = 1.0;
xscale = 1.0;
const char *tmp = getenv("GLFB_FULLSCREEN");
mFullscreen = !!(tmp);
mState.blit = true;
last_apts = 0;
/* linux framebuffer compat mode */
si.bits_per_pixel = 32;
si.xres = mState.width;
si.xres_virtual = si.xres;
si.yres = mState.height;
si.yres_virtual = si.yres;
si.blue.length = 8;
si.blue.offset = 0;
si.green.length = 8;
si.green.offset = 8;
si.red.length = 8;
si.red.offset = 16;
si.transp.length = 8;
si.transp.offset = 24;
unlink("/tmp/neutrino.input");
mkfifo("/tmp/neutrino.input", 0600);
input_fd = open("/tmp/neutrino.input", O_RDWR|O_CLOEXEC|O_NONBLOCK);
if (input_fd < 0)
lt_info("%s: could not open /tmp/neutrino.input FIFO: %m\n", __func__);
initKeys();
}
GLFbPC::~GLFbPC()
{
mShutDown = true;
if (input_fd >= 0)
close(input_fd);
osd_buf->clear();
}
void GLFbPC::initKeys()
{
mKeyMap[CLUTTER_KEY_Up] = KEY_UP;
mKeyMap[CLUTTER_KEY_Down] = KEY_DOWN;
mKeyMap[CLUTTER_KEY_Left] = KEY_LEFT;
mKeyMap[CLUTTER_KEY_Right] = KEY_RIGHT;
mKeyMap[CLUTTER_KEY_F1] = KEY_RED;
mKeyMap[CLUTTER_KEY_F2] = KEY_GREEN;
mKeyMap[CLUTTER_KEY_F3] = KEY_YELLOW;
mKeyMap[CLUTTER_KEY_F4] = KEY_BLUE;
mKeyMap[CLUTTER_KEY_F5] = KEY_WWW;
mKeyMap[CLUTTER_KEY_F6] = KEY_SUBTITLE;
mKeyMap[CLUTTER_KEY_F7] = KEY_MOVE;
mKeyMap[CLUTTER_KEY_F8] = KEY_SLEEP;
mKeyMap[CLUTTER_KEY_Page_Up] = KEY_PAGEUP;
mKeyMap[CLUTTER_KEY_Page_Down] = KEY_PAGEDOWN;
mKeyMap[CLUTTER_KEY_Return] = KEY_OK;
mKeyMap[CLUTTER_KEY_Escape] = KEY_EXIT;
mKeyMap['e'] = KEY_EPG;
mKeyMap['i'] = KEY_INFO;
mKeyMap['m'] = KEY_MENU;
mKeyMap['+'] = KEY_VOLUMEUP;
mKeyMap['-'] = KEY_VOLUMEDOWN;
mKeyMap['.'] = KEY_MUTE;
mKeyMap['h'] = KEY_HELP;
mKeyMap['p'] = KEY_POWER;
mKeyMap['0'] = KEY_0;
mKeyMap['1'] = KEY_1;
mKeyMap['2'] = KEY_2;
mKeyMap['3'] = KEY_3;
mKeyMap['4'] = KEY_4;
mKeyMap['5'] = KEY_5;
mKeyMap['6'] = KEY_6;
mKeyMap['7'] = KEY_7;
mKeyMap['8'] = KEY_8;
mKeyMap['9'] = KEY_9;
}
static ClutterActor *stage = NULL;
static ClutterActor *fb_actor = NULL;
static ClutterActor *vid_actor = NULL;
static ClutterTimeline *tl = NULL;
void GLFramebuffer::run()
{
int argc = 1;
int x = glfb_priv->mState.width;
int y = glfb_priv->mState.height;
/* some dummy commandline for GLUT to be happy */
char *a = (char *)"neutrino";
char **argv = (char **)malloc(sizeof(char *) * 2);
argv[0] = a;
argv[1] = NULL;
lt_info("GLFB: GL thread starting x %d y %d\n", x, y);
if (clutter_init(&argc, &argv) != CLUTTER_INIT_SUCCESS) {
lt_info("GLFB: error initializing clutter\n");
return;
}
lt_info("GLFB: %s:%d\n", __func__, __LINE__);
ClutterColor stage_color = { 0, 0, 0, 255 };
stage = clutter_stage_new();
clutter_actor_set_size(stage, x, y);
clutter_actor_set_background_color(stage, &stage_color);
clutter_actor_set_content_gravity(stage, CLUTTER_CONTENT_GRAVITY_RESIZE_ASPECT);
//g_signal_connect(stage, "destroy", G_CALLBACK(clutter_main_quit), NULL);
g_signal_connect(stage, "key-press-event", G_CALLBACK(GLFbPC::keyboardcb), (void *)1);
g_signal_connect(stage, "key-release-event", G_CALLBACK(GLFbPC::keyboardcb), NULL);
clutter_stage_set_user_resizable(CLUTTER_STAGE (stage), TRUE);
clutter_actor_grab_key_focus(stage);
clutter_actor_show(stage);
/* 32bit FB depth, *2 because tuxtxt uses a shadow buffer */
int fbmem = x * y * 4 * 2;
osd_buf.resize(fbmem);
lt_info("GLFB: OSD buffer set to %d bytes at 0x%p\n", fbmem, osd_buf.data());
/* video plane is below FB plane, so it comes first */
vid_actor = clutter_actor_new();
ClutterContent *fb = clutter_image_new();
/* osd_buf, because it starts up black */
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGR_888, x, y, x*3, NULL)) {
lt_info("GLFB::%s clutter_image_set_data failed? (vid)\n", __func__);
_exit(1); /* life is hard */
}
clutter_actor_set_content(vid_actor, fb);
g_object_unref(fb);
clutter_actor_set_size(vid_actor, x, y);
clutter_actor_set_position(vid_actor, 0, 0);
clutter_actor_add_constraint(vid_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_WIDTH, 0));
clutter_actor_add_constraint(vid_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_HEIGHT, 0));
clutter_actor_add_constraint(vid_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_X, 0));
clutter_actor_add_constraint(vid_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_Y, 0));
clutter_actor_set_content_gravity(vid_actor, CLUTTER_CONTENT_GRAVITY_RESIZE_ASPECT);
clutter_actor_set_pivot_point(vid_actor, 0.5, 0.5);
clutter_actor_add_child(stage, vid_actor);
clutter_actor_show(vid_actor);
fb_actor = clutter_actor_new();
fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf.data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x*4, NULL)) {
lt_info("GLFB::%s clutter_image_set_data failed? (osd)\n", __func__);
_exit(1); /* life is hard */
}
clutter_actor_set_content(fb_actor, fb);
g_object_unref(fb);
clutter_actor_set_size(fb_actor, x, y);
clutter_actor_set_position(fb_actor, 0, 0);
clutter_actor_add_constraint(fb_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_WIDTH, 0));
clutter_actor_add_constraint(fb_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_HEIGHT, 0));
clutter_actor_add_constraint(fb_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_X, 0));
clutter_actor_add_constraint(fb_actor, clutter_bind_constraint_new(stage, CLUTTER_BIND_Y, 0));
clutter_actor_set_content_gravity(fb_actor, CLUTTER_CONTENT_GRAVITY_RESIZE_ASPECT);
clutter_actor_add_child(stage, fb_actor);
clutter_actor_show(fb_actor);
glfb_priv->mInitDone = true; /* signal that setup is finished */
tl = clutter_timeline_new(100);
g_signal_connect(tl, "new-frame", G_CALLBACK(GLFbPC::rendercb), NULL);
clutter_timeline_set_repeat_count(tl, -1);
clutter_timeline_start(tl);
clutter_main();
lt_info("GLFB: GL thread stopping\n");
}
/* static */ void GLFbPC::rendercb()
{
glfb_priv->render();
}
/* static */ bool GLFbPC::keyboardcb(ClutterActor * /*actor*/, ClutterEvent *event, gpointer user_data)
{
guint key = clutter_event_get_key_symbol (event);
int keystate = user_data ? 1 : 0;
lt_info_c("GLFB::%s: 0x%x, %d\n", __func__, key, keystate);
struct input_event ev;
if (key == 'f' && keystate)
{
lt_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen?"off":"on");
glfb_priv->mFullscreen = !(glfb_priv->mFullscreen);
glfb_priv->mReInit = true;
return true;
}
std::map<int, int>::const_iterator i = glfb_priv->mKeyMap.find(key);
if (i == glfb_priv->mKeyMap.end())
return true;
ev.code = i->second;
ev.value = keystate; /* 1 = key down, 0 = key up */
ev.type = EV_KEY;
gettimeofday(&ev.time, NULL);
lt_debug_c("GLFB::%s: pushing 0x%x\n", __func__, ev.code);
write(glfb_priv->input_fd, &ev, sizeof(ev));
return true;
}
int sleep_us = 30000;
void GLFbPC::render()
{
if(mShutDown)
clutter_main_quit();
mReInitLock.lock();
if (mReInit)
{
int xoff = 0;
int yoff = 0;
mVAchanged = true;
mReInit = false;
#if 0
mX = &_mX[mFullscreen];
mY = &_mY[mFullscreen];
#endif
*mX = *mY * mOA.num / mOA.den;
if (mFullscreen) {
clutter_stage_set_fullscreen(CLUTTER_STAGE(stage), TRUE);
clutter_actor_show(stage);
clutter_stage_ensure_redraw(CLUTTER_STAGE(stage));
} else {
clutter_stage_set_fullscreen(CLUTTER_STAGE(stage), FALSE);
// *mX = *mY * mOA.num / mOA.den;
clutter_actor_set_size(stage, *mX, *mY);
}
lt_info("%s: reinit mX:%d mY:%d xoff:%d yoff:%d fs %d\n",
__func__, *mX, *mY, xoff, yoff, mFullscreen);
}
mReInitLock.unlock();
bltDisplayBuffer(); /* decoded video stream */
if (mState.blit) {
/* only blit manually after fb->blit(), this helps to find missed blit() calls */
mState.blit = false;
lt_debug("GLFB::%s blit!\n", __func__);
bltOSDBuffer(); /* OSD */
}
if (mVAchanged)
{
mVAchanged = false;
zoom = 1.0;
float xzoom = 1.0;
//xscale = 1.0;
int cmp = av_cmp_q(mVA, mOA);
const AVRational a149 = { 14, 9 };
switch (cmp) {
default:
case INT_MIN: /* invalid */
case 0: /* identical */
lt_debug("%s: mVA == mOA (or fullscreen mode :-)\n", __func__);
break;
case 1: /* mVA > mOA -- video is wider than display */
lt_debug("%s: mVA > mOA\n", __func__);
switch (mCrop) {
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mVA) / av_q2d(mOA);
break;
case DISPLAY_AR_MODE_LETTERBOX:
break;
case DISPLAY_AR_MODE_PANSCAN2:
zoom = av_q2d(a149) / av_q2d(mOA);
break;
case DISPLAY_AR_MODE_NONE:
xzoom = av_q2d(mOA) / av_q2d(mVA);
zoom = av_q2d(mVA) / av_q2d(mOA);
break;
default:
break;
}
break;
case -1: /* mVA < mOA -- video is taller than display */
lt_debug("%s: mVA < mOA\n", __func__);
switch (mCrop) {
case DISPLAY_AR_MODE_LETTERBOX:
break;
case DISPLAY_AR_MODE_PANSCAN2:
if (av_cmp_q(a149, mOA) < 0) {
zoom = av_q2d(mVA) * av_q2d(a149) / av_q2d(mOA);
break;
}
/* fallthrough for output format 14:9 */
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mOA) / av_q2d(mVA);
break;
case DISPLAY_AR_MODE_NONE:
xzoom = av_q2d(mOA) / av_q2d(mVA);
break;
default:
break;
}
break;
}
lt_debug("zoom: %f xscale: %f xzoom: %f\n", zoom, xscale,xzoom);
clutter_actor_set_scale(vid_actor, xscale*zoom*xzoom, zoom);
}
clutter_timeline_stop(tl);
clutter_timeline_set_delay(tl, sleep_us/1000);
clutter_timeline_start(tl);
}
void GLFbPC::bltOSDBuffer()
{
// lt_info("%s\n", __func__);
int x = glfb_priv->mState.width;
int y = glfb_priv->mState.height;
ClutterContent *fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), osd_buf->data(), COGL_PIXEL_FORMAT_BGRA_8888, x, y, x*4, NULL)) {
lt_info("GLFB::%s clutter_image_set_data failed?\n", __func__);
_exit(1); /* life is hard */
}
clutter_actor_set_content(fb_actor, fb);
g_object_unref(fb);
clutter_actor_show(fb_actor);
}
void GLFbPC::bltDisplayBuffer()
{
// lt_info("GLFB::%s vdec: %p\n", __func__, vdec);
if (!vdec) /* cannot start yet */
return;
static bool warn = true;
VDec::SWFramebuffer *buf = vdec->getDecBuf();
if (!buf) {
if (warn)
lt_info("GLFB::%s did not get a buffer...\n", __func__);
warn = false;
return;
}
warn = true;
int w = buf->width(), h = buf->height();
if (w == 0 || h == 0)
return;
AVRational a = buf->AR();
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA)) {
_mVA = a;
/* _mVA is the raw buffer's aspect, mVA is the real scaled output aspect */
av_reduce(&mVA.num, &mVA.den, w * a.num, h * a.den, INT_MAX);
// mVA.num: 16 mVA.den: 9 w: 720 h: 576
// 16*576/720/9 = 1.42222
xscale = (double)mVA.num*h/(double)mVA.den/w;
mVAchanged = true;
}
ClutterContent *fb = clutter_image_new();
if (!clutter_image_set_data(CLUTTER_IMAGE(fb), &(*buf)[0], COGL_PIXEL_FORMAT_BGR_888, w, h, w*3, NULL)) {
lt_info("GLFB::%s clutter_image_set_data failed?\n", __func__);
_exit(1); /* life is hard */
}
clutter_actor_set_content(vid_actor, fb);
g_object_unref(fb);
clutter_actor_show(vid_actor);
/* "rate control" mechanism starts here...
* this implementation is pretty naive and not working too well, but
* better this than nothing... :-) */
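/* sleep_us is a 2:1 weighted running average of its previous value and the
 * (scaled) amount the video PTS is ahead of the audio PTS; further below it is
 * capped so the render loop still runs at at least half the frame rate and
 * floored at 1 us. */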
int64_t apts = 0;
int64_t vpts = buf->pts();
if (adec)
apts = adec->getPts();
if (apts != last_apts) {
int rate, dummy1, dummy2;
if (apts < vpts)
sleep_us = (sleep_us * 2 + (vpts - apts)*10/9) / 3;
else if (sleep_us > 1000)
sleep_us -= 1000;
last_apts = apts;
vdec->getPictureInfo(dummy1, dummy2, rate);
if (rate > 0)
rate = 2000000 / rate; /* limit to half the frame rate */
else
rate = 50000; /* minimum 20 fps */
if (sleep_us > rate)
sleep_us = rate;
else if (sleep_us < 1)
sleep_us = 1;
}
lt_debug("vpts: 0x%" PRIx64 " apts: 0x%" PRIx64 " diff: %6.3f sleep_us %d buf %d\n",
buf->pts(), apts, (buf->pts() - apts)/90000.0, sleep_us, vdec->buf_num);
}

libgeneric-pc/dmx.cpp (new file, 500 lines)

@@ -0,0 +1,500 @@
/*
* cDemux implementation for generic dvbapi
*
* derived from libtriple/dmx_td.cpp
*
* (C) 2010-2013 Stefan Seyfried
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
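/*
 * This implementation wraps the Linux DVB demux device
 * (/dev/dvb/adapter0/demux0): section filters map to DMX_SET_FILTER,
 * PES/TS filters to DMX_SET_PES_FILTER, and additional PIDs on a TP
 * channel are added/removed with DMX_ADD_PID / DMX_REMOVE_PID.
 */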
#include "config.h"
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <poll.h>
#include <errno.h>
#include <inttypes.h>
#include <cstring>
#include <cstdio>
#include <string>
#include <unistd.h>
#include <sys/ioctl.h>
#include "dmx_hal.h"
#include "lt_debug.h"
#include "video_lib.h"
/* needed for getSTC... */
extern cVideo *videoDecoder;
#define lt_debug(args...) _lt_debug(TRIPLE_DEBUG_DEMUX, this, args)
#define lt_info(args...) _lt_info(TRIPLE_DEBUG_DEMUX, this, args)
#define lt_info_c(args...) _lt_info(TRIPLE_DEBUG_DEMUX, NULL, args)
#define dmx_err(_errfmt, _errstr, _revents) do { \
lt_info("%s " _errfmt " fd:%d, ev:0x%x %s pid:0x%04hx flt:0x%02hx\n", \
__func__, _errstr, fd, _revents, DMX_T[dmx_type], pid, flt); \
} while(0);
cDemux *videoDemux = NULL;
cDemux *audioDemux = NULL;
//cDemux *pcrDemux = NULL;
static const char *DMX_T[] = {
"DMX_INVALID",
"DMX_VIDEO",
"DMX_AUDIO",
"DMX_PES",
"DMX_PSI",
"DMX_PIP",
"DMX_TP",
"DMX_PCR"
};
/* map the device numbers. for now only demux0 is used */
static const char *devname[] = {
"/dev/dvb/adapter0/demux0",
"/dev/dvb/adapter0/demux0",
"/dev/dvb/adapter0/demux0"
};
/* uuuugly */
static int dmx_tp_count = 0;
#define MAX_TS_COUNT 8
extern bool HAL_nodec;
cDemux::cDemux(int n)
{
if (n < 0 || n > 2)
{
lt_info("%s ERROR: n invalid (%d)\n", __FUNCTION__, n);
num = 0;
}
else
num = n;
fd = -1;
}
cDemux::~cDemux()
{
lt_debug("%s #%d fd: %d\n", __FUNCTION__, num, fd);
Close();
}
bool cDemux::Open(DMX_CHANNEL_TYPE pes_type, void * /*hVideoBuffer*/, int uBufferSize)
{
int devnum = num;
int flags = O_RDWR|O_CLOEXEC;
if (fd > -1)
lt_info("%s FD ALREADY OPENED? fd = %d\n", __FUNCTION__, fd);
dmx_type = pes_type;
if (pes_type != DMX_PSI_CHANNEL)
flags |= O_NONBLOCK;
fd = open(devname[devnum], flags);
if (fd < 0)
{
lt_info("%s %s: %m\n", __FUNCTION__, devname[devnum]);
return false;
}
lt_debug("%s #%d pes_type: %s(%d), uBufferSize: %d fd: %d\n", __func__,
num, DMX_T[pes_type], pes_type, uBufferSize, fd);
if (dmx_type == DMX_VIDEO_CHANNEL)
uBufferSize = 0x100000; /* 1MB */
if (dmx_type == DMX_AUDIO_CHANNEL)
uBufferSize = 0x10000; /* 64k */
#if 0
if (!pesfds.empty())
{
lt_info("%s ERROR! pesfds not empty!\n", __FUNCTION__); /* TODO: error handling */
return false;
}
int n = DMX_SOURCE_FRONT0;
if (ioctl(fd, DMX_SET_SOURCE, &n) < 0)
lt_info("%s DMX_SET_SOURCE %d failed! (%m)\n", __func__, n);
#endif
if (uBufferSize > 0)
{
/* probably uBufferSize == 0 means "use default size". TODO: find a reasonable default */
if (ioctl(fd, DMX_SET_BUFFER_SIZE, uBufferSize) < 0)
lt_info("%s DMX_SET_BUFFER_SIZE failed (%m)\n", __func__);
}
buffersize = uBufferSize;
return true;
}
void cDemux::Close(void)
{
lt_debug("%s #%d, fd = %d\n", __FUNCTION__, num, fd);
if (fd < 0)
{
lt_info("%s #%d: not open!\n", __FUNCTION__, num);
return;
}
pesfds.clear();
ioctl(fd, DMX_STOP);
close(fd);
fd = -1;
if (dmx_type == DMX_TP_CHANNEL)
{
dmx_tp_count--;
if (dmx_tp_count < 0)
{
lt_info("%s dmx_tp_count < 0!!\n", __func__);
dmx_tp_count = 0;
}
}
}
bool cDemux::Start(bool)
{
lt_debug("%s #%d fd: %d type: %s\n", __func__, num, fd, DMX_T[dmx_type]);
if (fd < 0)
{
lt_info("%s #%d: not open!\n", __FUNCTION__, num);
return false;
}
ioctl(fd, DMX_START);
return true;
}
bool cDemux::Stop(void)
{
lt_debug("%s #%d fd: %d type: %s\n", __func__, num, fd, DMX_T[dmx_type]);
if (fd < 0)
{
lt_info("%s #%d: not open!\n", __FUNCTION__, num);
return false;
}
ioctl(fd, DMX_STOP);
return true;
}
int cDemux::Read(unsigned char *buff, int len, int timeout)
{
#if 0
if (len != 4095 && timeout != 100)
fprintf(stderr, "cDemux::%s #%d fd: %d type: %s len: %d timeout: %d\n",
__FUNCTION__, num, fd, DMX_T[dmx_type], len, timeout);
#endif
int rc;
struct pollfd ufds;
ufds.fd = fd;
ufds.events = POLLIN|POLLPRI|POLLERR;
ufds.revents = 0;
if (timeout > 0)
{
retry:
rc = ::poll(&ufds, 1, timeout);
if (!rc)
return 0; // timeout
else if (rc < 0)
{
dmx_err("poll: %s,", strerror(errno), 0)
//lt_info("%s poll: %m\n", __FUNCTION__);
/* happens, when running under gdb... */
if (errno == EINTR)
goto retry;
return -1;
}
#if 0
if (ufds.revents & POLLERR) /* POLLERR means buffer error, i.e. buffer overflow */
{
dmx_err("received %s,", "POLLERR", ufds.revents);
/* this seems to happen sometimes at recording start, without bad effects */
return 0;
}
#endif
if (ufds.revents & POLLHUP) /* we get POLLHUP if e.g. a too big DMX_BUFFER_SIZE was set */
{
dmx_err("received %s,", "POLLHUP", ufds.revents);
return -1;
}
if (!(ufds.revents & POLLIN)) /* we requested POLLIN but did not get it? */
{
dmx_err("received %s, please report!", "POLLIN", ufds.revents);
return 0;
}
}
rc = ::read(fd, buff, len);
//fprintf(stderr, "fd %d ret: %d\n", fd, rc);
if (rc < 0)
dmx_err("read: %s", strerror(errno), 0);
return rc;
}
bool cDemux::sectionFilter(unsigned short _pid, const unsigned char * const filter,
const unsigned char * const mask, int len, int timeout,
const unsigned char * const negmask)
{
struct dmx_sct_filter_params s_flt;
memset(&s_flt, 0, sizeof(s_flt));
pid = _pid;
if (len > DMX_FILTER_SIZE)
{
lt_info("%s #%d: len too long: %d, DMX_FILTER_SIZE %d\n", __func__, num, len, DMX_FILTER_SIZE);
len = DMX_FILTER_SIZE;
}
s_flt.pid = pid;
s_flt.timeout = timeout;
flt = filter[0];
memcpy(s_flt.filter.filter, filter, len);
memcpy(s_flt.filter.mask, mask, len);
if (negmask != NULL)
memcpy(s_flt.filter.mode, negmask, len);
s_flt.flags = DMX_IMMEDIATE_START|DMX_CHECK_CRC;
int to = 0;
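/* default timeouts per table_id, on the order of the maximum DVB SI
 * repetition intervals for the respective table, so a filter gives up
 * if the expected section never shows up */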
switch (filter[0]) {
case 0x00: /* program_association_section */
to = 2000;
break;
case 0x01: /* conditional_access_section */
to = 6000;
break;
case 0x02: /* program_map_section */
to = 1500;
break;
case 0x03: /* transport_stream_description_section */
to = 10000;
break;
/* 0x04 - 0x3F: reserved */
case 0x40: /* network_information_section - actual_network */
to = 10000;
break;
case 0x41: /* network_information_section - other_network */
to = 15000;
break;
case 0x42: /* service_description_section - actual_transport_stream */
to = 10000;
break;
/* 0x43 - 0x45: reserved for future use */
case 0x46: /* service_description_section - other_transport_stream */
to = 10000;
break;
/* 0x47 - 0x49: reserved for future use */
case 0x4A: /* bouquet_association_section */
to = 11000;
break;
/* 0x4B - 0x4D: reserved for future use */
case 0x4E: /* event_information_section - actual_transport_stream, present/following */
to = 2000;
break;
case 0x4F: /* event_information_section - other_transport_stream, present/following */
to = 10000;
break;
/* 0x50 - 0x5F: event_information_section - actual_transport_stream, schedule */
/* 0x60 - 0x6F: event_information_section - other_transport_stream, schedule */
case 0x70: /* time_date_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
//s_flt.pid = 0x0014;
to = 30000;
break;
case 0x71: /* running_status_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x72: /* stuffing_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x73: /* time_offset_section */
//s_flt.pid = 0x0014;
to = 30000;
break;
/* 0x74 - 0x7D: reserved for future use */
case 0x7E: /* discontinuity_information_section */
s_flt.flags &= ~DMX_CHECK_CRC; /* section has no CRC */
to = 0;
break;
case 0x7F: /* selection_information_section */
to = 0;
break;
/* 0x80 - 0x8F: ca_message_section */
/* 0x90 - 0xFE: user defined */
/* 0xFF: reserved */
default:
break;
// return -1;
}
/* the negmask == NULL is a hack: the users of negmask are PMT-update
* and sectionsd EIT-Version change. And they really want no timeout
* if timeout == 0 instead of "default timeout" */
if (timeout == 0 && negmask == NULL)
s_flt.timeout = to;
lt_debug("%s #%d pid:0x%04hx fd:%d type:%s len:%d to:%d flags:%x flt[0]:%02x\n", __func__, num,
pid, fd, DMX_T[dmx_type], len, s_flt.timeout,s_flt.flags, s_flt.filter.filter[0]);
#if 0
fprintf(stderr,"filt: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.filter[i]);fprintf(stderr,"\n");
fprintf(stderr,"mask: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.mask [i]);fprintf(stderr,"\n");
fprintf(stderr,"mode: ");for(int i=0;i<DMX_FILTER_SIZE;i++)fprintf(stderr,"%02hhx ",s_flt.filter.mode [i]);fprintf(stderr,"\n");
#endif
ioctl (fd, DMX_STOP);
if (ioctl(fd, DMX_SET_FILTER, &s_flt) < 0)
return false;
return true;
}
bool cDemux::pesFilter(const unsigned short _pid)
{
struct dmx_pes_filter_params p_flt;
pid = _pid;
flt = 0;
/* allow PID 0 for web streaming e.g.
* this check originally is from tuxbox cvs but I'm not sure
* what it is good for...
if (pid <= 0x0001 && dmx_type != DMX_PCR_ONLY_CHANNEL)
return false;
*/
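/* PIDs 0x0002..0x000f are reserved by MPEG-TS and 0x1fff is the null-packet
 * PID (anything above is not a valid 13-bit PID), so these are rejected */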
if ((pid >= 0x0002 && pid <= 0x000f) || pid >= 0x1fff)
return false;
lt_debug("%s #%d pid: 0x%04hx fd: %d type: %s\n", __FUNCTION__, num, pid, fd, DMX_T[dmx_type]);
memset(&p_flt, 0, sizeof(p_flt));
p_flt.pid = pid;
p_flt.output = DMX_OUT_DECODER;
p_flt.input = DMX_IN_FRONTEND;
switch (dmx_type) {
case DMX_PCR_ONLY_CHANNEL:
p_flt.pes_type = DMX_PES_PCR;
if (HAL_nodec)
return true;
break;
case DMX_AUDIO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec) /* no need to demux if we don't decode... */
return true;
break;
case DMX_VIDEO_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
if (HAL_nodec)
return true;
break;
case DMX_PES_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TAP;
break;
case DMX_TP_CHANNEL:
p_flt.pes_type = DMX_PES_OTHER;
p_flt.output = DMX_OUT_TSDEMUX_TAP;
break;
default:
lt_info("%s #%d invalid dmx_type %d!\n", __func__, num, dmx_type);
return false;
}
return (ioctl(fd, DMX_SET_PES_FILTER, &p_flt) >= 0);
}
void cDemux::SetSyncMode(AVSYNC_TYPE /*mode*/)
{
lt_debug("%s #%d\n", __FUNCTION__, num);
}
void *cDemux::getBuffer()
{
lt_debug("%s #%d\n", __FUNCTION__, num);
return NULL;
}
void *cDemux::getChannel()
{
lt_debug("%s #%d\n", __FUNCTION__, num);
return NULL;
}
bool cDemux::addPid(unsigned short Pid)
{
lt_debug("%s: pid 0x%04hx\n", __func__, Pid);
pes_pids pfd;
int ret;
if (dmx_type != DMX_TP_CHANNEL)
{
lt_info("%s pes_type %s not implemented yet! pid=%hx\n", __FUNCTION__, DMX_T[dmx_type], Pid);
return false;
}
if (fd == -1)
lt_info("%s bucketfd not yet opened? pid=%hx\n", __FUNCTION__, Pid);
pfd.fd = fd; /* dummy */
pfd.pid = Pid;
pesfds.push_back(pfd);
ret = (ioctl(fd, DMX_ADD_PID, &Pid));
if (ret < 0)
lt_info("%s: DMX_ADD_PID (%m)\n", __func__);
return (ret != -1);
}
void cDemux::removePid(unsigned short Pid)
{
if (dmx_type != DMX_TP_CHANNEL)
{
lt_info("%s pes_type %s not implemented yet! pid=%hx\n", __FUNCTION__, DMX_T[dmx_type], Pid);
return;
}
for (std::vector<pes_pids>::iterator i = pesfds.begin(); i != pesfds.end(); ++i)
{
if ((*i).pid == Pid) {
lt_debug("removePid: removing demux fd %d pid 0x%04x\n", fd, Pid);
if (ioctl(fd, DMX_REMOVE_PID, Pid) < 0)
lt_info("%s: (DMX_REMOVE_PID, 0x%04hx): %m\n", __func__, Pid);
pesfds.erase(i);
return; /* TODO: what if the same PID is there multiple times */
}
}
lt_info("%s pid 0x%04x not found\n", __FUNCTION__, Pid);
}
void cDemux::getSTC(int64_t * STC)
{
int64_t pts = 0;
if (videoDecoder)
pts = videoDecoder->GetPTS();
*STC = pts;
}
int cDemux::getUnit(void)
{
lt_debug("%s #%d\n", __FUNCTION__, num);
/* just guessed that this is the right thing to do.
right now this is only used by the CA code which is stubbed out
anyway */
return num;
}
bool cDemux::SetSource(int unit, int source)
{
lt_info_c("%s(%d, %d): not implemented yet\n", __func__, unit, source);
return true;
}
int cDemux::GetSource(int unit)
{
lt_info_c("%s(%d): not implemented yet\n", __func__, unit);
return 0;
}

libgeneric-pc/glfb.cpp (new file, 646 lines)

@@ -0,0 +1,646 @@
/*
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
openGL based framebuffer implementation
based on Carjay's neutrino-hd-dvbapi work, see
http://gitorious.org/neutrino-hd/neutrino-hd-dvbapi
TODO: AV-Sync code is "experimental" at best
*/
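/*
 * Structure (as implemented below): the GL thread runs the GLUT main loop;
 * the OSD and the decoded video frame are uploaded into two textures via
 * pixel buffer objects and drawn as textured quads on every render() pass.
 * Keyboard input is translated into Linux input events and written to the
 * /tmp/neutrino.input FIFO.
 */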
#include "config.h"
#include <vector>
#include <sys/types.h>
#include <signal.h>
#include <cstdio>
#include <cstring>
#include <errno.h>
#include <inttypes.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <linux/input.h>
#include "glfb_priv.h"
#include "video_lib.h"
#include "audio_lib.h"
#include "lt_debug.h"
#define lt_debug_c(args...) _lt_debug(HAL_DEBUG_INIT, NULL, args)
#define lt_info_c(args...) _lt_info(HAL_DEBUG_INIT, NULL, args)
#define lt_debug(args...) _lt_debug(HAL_DEBUG_INIT, this, args)
#define lt_info(args...) _lt_info(HAL_DEBUG_INIT, this, args)
extern cVideo *videoDecoder;
extern cAudio *audioDecoder;
/* the private class that does stuff only needed inside libstb-hal.
* is used e.g. by cVideo... */
GLFbPC *glfb_priv = NULL;
GLFramebuffer::GLFramebuffer(int x, int y)
{
Init();
glfb_priv = new GLFbPC(x, y, osd_buf);
si = glfb_priv->getScreenInfo();
start();
while (!glfb_priv->mInitDone)
usleep(1);
}
GLFramebuffer::~GLFramebuffer()
{
glfb_priv->mShutDown = true;
join();
delete glfb_priv;
glfb_priv = NULL;
}
void GLFramebuffer::blit()
{
glfb_priv->blit();
}
GLFbPC::GLFbPC(int x, int y, std::vector<unsigned char> &buf): mReInit(true), mShutDown(false), mInitDone(false)
{
osd_buf = &buf;
mState.width = x;
mState.height = y;
mX = &_mX[0];
mY = &_mY[0];
*mX = x;
*mY = y;
av_reduce(&mOA.num, &mOA.den, x, y, INT_MAX);
mVA = mOA; /* initial aspect ratios are from the FB resolution, those */
_mVA = mVA; /* will be updated by the videoDecoder functions anyway */
mVAchanged = true;
mCrop = DISPLAY_AR_MODE_PANSCAN;
zoom = 1.0;
xscale = 1.0;
const char *tmp = getenv("GLFB_FULLSCREEN");
mFullscreen = !!(tmp);
mState.blit = true;
last_apts = 0;
/* linux framebuffer compat mode */
si.bits_per_pixel = 32;
si.xres = mState.width;
si.xres_virtual = si.xres;
si.yres = mState.height;
si.yres_virtual = si.yres;
si.blue.length = 8;
si.blue.offset = 0;
si.green.length = 8;
si.green.offset = 8;
si.red.length = 8;
si.red.offset = 16;
si.transp.length = 8;
si.transp.offset = 24;
unlink("/tmp/neutrino.input");
mkfifo("/tmp/neutrino.input", 0600);
input_fd = open("/tmp/neutrino.input", O_RDWR|O_CLOEXEC|O_NONBLOCK);
if (input_fd < 0)
lt_info("%s: could not open /tmp/neutrino.input FIFO: %m\n", __func__);
initKeys();
}
GLFbPC::~GLFbPC()
{
mShutDown = true;
if (input_fd >= 0)
close(input_fd);
osd_buf->clear();
}
void GLFbPC::initKeys()
{
/*
Keep in sync with initKeys() in clutterfb.cpp
*/
mSpecialMap[GLUT_KEY_UP] = KEY_UP;
mSpecialMap[GLUT_KEY_DOWN] = KEY_DOWN;
mSpecialMap[GLUT_KEY_LEFT] = KEY_LEFT;
mSpecialMap[GLUT_KEY_RIGHT] = KEY_RIGHT;
mSpecialMap[GLUT_KEY_F1] = KEY_RED;
mSpecialMap[GLUT_KEY_F2] = KEY_GREEN;
mSpecialMap[GLUT_KEY_F3] = KEY_YELLOW;
mSpecialMap[GLUT_KEY_F4] = KEY_BLUE;
mSpecialMap[GLUT_KEY_F5] = KEY_RECORD;
mSpecialMap[GLUT_KEY_F6] = KEY_PLAY;
mSpecialMap[GLUT_KEY_F7] = KEY_PAUSE;
mSpecialMap[GLUT_KEY_F8] = KEY_STOP;
mSpecialMap[GLUT_KEY_F9] = KEY_FORWARD;
mSpecialMap[GLUT_KEY_F10] = KEY_REWIND;
mSpecialMap[GLUT_KEY_F11] = KEY_NEXT;
mSpecialMap[GLUT_KEY_F12] = KEY_PREVIOUS;
mSpecialMap[GLUT_KEY_PAGE_UP] = KEY_PAGEUP;
mSpecialMap[GLUT_KEY_PAGE_DOWN] = KEY_PAGEDOWN;
mKeyMap[0x0d] = KEY_OK;
mKeyMap[0x1b] = KEY_EXIT;
mKeyMap['0'] = KEY_0;
mKeyMap['1'] = KEY_1;
mKeyMap['2'] = KEY_2;
mKeyMap['3'] = KEY_3;
mKeyMap['4'] = KEY_4;
mKeyMap['5'] = KEY_5;
mKeyMap['6'] = KEY_6;
mKeyMap['7'] = KEY_7;
mKeyMap['8'] = KEY_8;
mKeyMap['9'] = KEY_9;
mKeyMap['+'] = KEY_VOLUMEUP;
mKeyMap['-'] = KEY_VOLUMEDOWN;
mKeyMap['.'] = KEY_MUTE;
mKeyMap['a'] = KEY_AUDIO;
mKeyMap['e'] = KEY_EPG;
// ['f'] is reserved to toggle fullscreen;
mKeyMap['g'] = KEY_GAMES;
mKeyMap['h'] = KEY_HELP;
mKeyMap['i'] = KEY_INFO;
mKeyMap['m'] = KEY_MENU;
mKeyMap['p'] = KEY_POWER;
mKeyMap['r'] = KEY_RADIO;
mKeyMap['s'] = KEY_SUBTITLE;
mKeyMap['t'] = KEY_TV;
mKeyMap['v'] = KEY_VIDEO;
mKeyMap['z'] = KEY_SLEEP;
/* shift keys */
mKeyMap['F'] = KEY_FAVORITES;
mKeyMap['M'] = KEY_MODE;
mKeyMap['S'] = KEY_SAT;
mKeyMap['T'] = KEY_TEXT;
mKeyMap['W'] = KEY_WWW;
}
void GLFramebuffer::run()
{
int argc = 1;
int x = glfb_priv->mState.width;
int y = glfb_priv->mState.height;
/* some dummy commandline for GLUT to be happy */
char const *argv[2] = { "neutrino", 0 };
lt_info("GLFB: GL thread starting x %d y %d\n", x, y);
glutInit(&argc, const_cast<char **>(argv));
glutInitWindowSize(x, y);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH);
glutCreateWindow("Neutrino");
/* 32bit FB depth, *2 because tuxtxt uses a shadow buffer */
int fbmem = x * y * 4 * 2;
osd_buf.resize(fbmem);
lt_info("GLFB: OSD buffer set to %d bytes at 0x%p\n", fbmem, osd_buf.data());
glfb_priv->mInitDone = true; /* signal that setup is finished */
/* init the good stuff */
GLenum err = glewInit();
if(err == GLEW_OK)
{
if((!GLEW_VERSION_1_5)||(!GLEW_EXT_pixel_buffer_object)||(!GLEW_ARB_texture_non_power_of_two))
{
lt_info("GLFB: Sorry, your graphics card is not supported. "
"Needs at least OpenGL 1.5, pixel buffer objects and NPOT textures.\n");
lt_info("incompatible graphics card: %m");
_exit(1); /* Life is hard */
}
else
{
glutSetCursor(GLUT_CURSOR_NONE);
glutDisplayFunc(GLFbPC::rendercb);
glutKeyboardFunc(GLFbPC::keyboardcb);
glutSpecialFunc(GLFbPC::specialcb);
glutReshapeFunc(GLFbPC::resizecb);
glfb_priv->setupGLObjects(); /* needs GLEW prototypes */
glutSetOption(GLUT_ACTION_ON_WINDOW_CLOSE, GLUT_ACTION_CONTINUE_EXECUTION);
glutMainLoop();
glfb_priv->releaseGLObjects();
}
}
else
lt_info("GLFB: error initializing glew: %d\n", err);
lt_info("GLFB: GL thread stopping\n");
}
#if 0
void GLFbPC::setupCtx()
{
int argc = 1;
/* some dummy commandline for GLUT to be happy */
char const *argv[2] = { "neutrino", 0 };
lt_info("GLFB: GL thread starting x %d y %d\n", mX[0], mY[0]);
glutInit(&argc, const_cast<char **>(argv));
glutInitWindowSize(mX[0], mY[0]);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH);
glutCreateWindow("Neutrino");
}
void GLFbPC::setupOSDBuffer()
{ /* the OSD buffer size can be decoupled from the actual
window size since the GL can blit-stretch with no
trouble at all, ah, the luxury of ignorance... */
// mMutex.lock();
if (mState.width && mState.height)
{
/* 32bit FB depth, *2 because tuxtxt uses a shadow buffer */
int fbmem = mState.width * mState.height * 4 * 2;
osd_buf->resize(fbmem);
lt_info("GLFB: OSD buffer set to %d bytes at 0x%p\n", fbmem, osd_buf->data());
}
}
#endif
void GLFbPC::setupGLObjects()
{
unsigned char buf[4] = { 0, 0, 0, 0 }; /* 1 black pixel */
glGenTextures(1, &mState.osdtex);
glGenTextures(1, &mState.displaytex);
glBindTexture(GL_TEXTURE_2D, mState.osdtex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, mState.width, mState.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glBindTexture(GL_TEXTURE_2D, mState.displaytex); /* we do not yet know the size so will set that inline */
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glGenBuffers(1, &mState.pbo);
glGenBuffers(1, &mState.displaypbo);
/* hack to start with black video buffer instead of white */
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, mState.displaypbo);
glBufferData(GL_PIXEL_UNPACK_BUFFER, sizeof(buf), buf, GL_STREAM_DRAW_ARB);
glBindTexture(GL_TEXTURE_2D, mState.displaytex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_BGRA, GL_UNSIGNED_BYTE, 0);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
}
void GLFbPC::releaseGLObjects()
{
glDeleteBuffers(1, &mState.pbo);
glDeleteBuffers(1, &mState.displaypbo);
glDeleteTextures(1, &mState.osdtex);
glDeleteTextures(1, &mState.displaytex);
}
/* static */ void GLFbPC::rendercb()
{
glfb_priv->render();
}
/* static */ void GLFbPC::keyboardcb(unsigned char key, int /*x*/, int /*y*/)
{
lt_debug_c("GLFB::%s: 0x%x\n", __func__, key);
struct input_event ev;
if (key == 'f')
{
lt_info_c("GLFB::%s: toggle fullscreen %s\n", __func__, glfb_priv->mFullscreen?"off":"on");
glfb_priv->mFullscreen = !(glfb_priv->mFullscreen);
glfb_priv->mReInit = true;
return;
}
std::map<unsigned char, int>::const_iterator i = glfb_priv->mKeyMap.find(key);
if (i == glfb_priv->mKeyMap.end())
return;
ev.code = i->second;
ev.value = 1; /* key down */
ev.type = EV_KEY;
gettimeofday(&ev.time, NULL);
lt_debug_c("GLFB::%s: pushing 0x%x\n", __func__, ev.code);
write(glfb_priv->input_fd, &ev, sizeof(ev));
ev.value = 0; /* neutrino is stupid, so push key up directly after key down */
write(glfb_priv->input_fd, &ev, sizeof(ev));
}
/* static */ void GLFbPC::specialcb(int key, int /*x*/, int /*y*/)
{
lt_debug_c("GLFB::%s: 0x%x\n", __func__, key);
struct input_event ev;
std::map<int, int>::const_iterator i = glfb_priv->mSpecialMap.find(key);
if (i == glfb_priv->mSpecialMap.end())
return;
ev.code = i->second;
ev.value = 1;
ev.type = EV_KEY;
gettimeofday(&ev.time, NULL);
lt_debug_c("GLFB::%s: pushing 0x%x\n", __func__, ev.code);
write(glfb_priv->input_fd, &ev, sizeof(ev));
ev.value = 0;
write(glfb_priv->input_fd, &ev, sizeof(ev));
}
int sleep_us = 30000;
void GLFbPC::render()
{
if(mShutDown)
glutLeaveMainLoop();
mReInitLock.lock();
if (mReInit)
{
int xoff = 0;
int yoff = 0;
mVAchanged = true;
mReInit = false;
mX = &_mX[mFullscreen];
mY = &_mY[mFullscreen];
if (mFullscreen) {
int x = glutGet(GLUT_SCREEN_WIDTH);
int y = glutGet(GLUT_SCREEN_HEIGHT);
*mX = x;
*mY = y;
AVRational a = { x, y };
if (av_cmp_q(a, mOA) < 0)
*mY = x * mOA.den / mOA.num;
else if (av_cmp_q(a, mOA) > 0)
*mX = y * mOA.num / mOA.den;
xoff = (x - *mX) / 2;
yoff = (y - *mY) / 2;
glutFullScreen();
} else
*mX = *mY * mOA.num / mOA.den;
lt_info("%s: reinit mX:%d mY:%d xoff:%d yoff:%d fs %d\n",
__func__, *mX, *mY, xoff, yoff, mFullscreen);
glViewport(xoff, yoff, *mX, *mY);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect = static_cast<float>(*mX)/ *mY;
float osdaspect = static_cast<float>(mOA.den) / mOA.num;
glOrtho(aspect*-osdaspect, aspect*osdaspect, -1.0, 1.0, -1.0, 1.0 );
glClearColor(0.0, 0.0, 0.0, 1.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_BLEND);
glEnable(GL_TEXTURE_2D);
glDisable(GL_DEPTH_TEST);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}
mReInitLock.unlock();
if (!mFullscreen && (*mX != glutGet(GLUT_WINDOW_WIDTH) || *mY != glutGet(GLUT_WINDOW_HEIGHT)))
glutReshapeWindow(*mX, *mY);
bltDisplayBuffer(); /* decoded video stream */
if (mState.blit) {
/* only blit manually after fb->blit(), this helps to find missed blit() calls */
mState.blit = false;
lt_debug("GLFB::%s blit!\n", __func__);
bltOSDBuffer(); /* OSD */
}
glBindTexture(GL_TEXTURE_2D, mState.osdtex);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if (mVAchanged)
{
mVAchanged = false;
zoom = 1.0;
xscale = 1.0;
int cmp = (mCrop == DISPLAY_AR_MODE_NONE) ? 0 : av_cmp_q(mVA, mOA);
const AVRational a149 = { 14, 9 };
switch (cmp) {
default:
case INT_MIN: /* invalid */
case 0: /* identical */
lt_debug("%s: mVA == mOA (or fullscreen mode :-)\n", __func__);
break;
case 1: /* mVA > mOA -- video is wider than display */
lt_debug("%s: mVA > mOA\n", __func__);
xscale = av_q2d(mVA) / av_q2d(mOA);
switch (mCrop) {
case DISPLAY_AR_MODE_PANSCAN:
break;
case DISPLAY_AR_MODE_LETTERBOX:
zoom = av_q2d(mOA) / av_q2d(mVA);
break;
case DISPLAY_AR_MODE_PANSCAN2:
zoom = av_q2d(mOA) / av_q2d(a149);
break;
default:
break;
}
break;
case -1: /* mVA < mOA -- video is taller than display */
lt_debug("%s: mVA < mOA\n", __func__);
xscale = av_q2d(mVA) / av_q2d(mOA);
switch (mCrop) {
case DISPLAY_AR_MODE_LETTERBOX:
break;
case DISPLAY_AR_MODE_PANSCAN2:
if (av_cmp_q(a149, mOA) < 0) {
zoom = av_q2d(mVA) * av_q2d(a149) / av_q2d(mOA);
break;
}
/* fallthrough for output format 14:9 */
case DISPLAY_AR_MODE_PANSCAN:
zoom = av_q2d(mOA) / av_q2d(mVA);
break;
default:
break;
}
break;
}
}
glBindTexture(GL_TEXTURE_2D, mState.displaytex);
drawSquare(zoom, xscale);
glBindTexture(GL_TEXTURE_2D, mState.osdtex);
drawSquare(1.0, -100);
glFlush();
glutSwapBuffers();
GLuint err = glGetError();
if (err != 0)
lt_info("GLFB::%s: GLError:%d 0x%04x\n", __func__, err, err);
if (sleep_us > 0)
usleep(sleep_us);
glutPostRedisplay();
}
/* static */ void GLFbPC::resizecb(int w, int h)
{
glfb_priv->checkReinit(w, h);
}
void GLFbPC::checkReinit(int x, int y)
{
static int last_x = 0, last_y = 0;
mReInitLock.lock();
if (!mFullscreen && !mReInit && (x != *mX || y != *mY)) {
if (x != *mX && abs(x - last_x) > 2) {
*mX = x;
*mY = *mX * mOA.den / mOA.num;
} else if (y != *mY && abs(y - last_y) > 2) {
*mY = y;
*mX = *mY * mOA.num / mOA.den;
}
mReInit = true;
}
mReInitLock.unlock();
last_x = x;
last_y = y;
}
void GLFbPC::drawSquare(float size, float x_factor)
{
GLfloat vertices[] = {
1.0f, 1.0f,
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, -1.0f,
};
GLubyte indices[] = { 0, 1, 2, 3 };
GLfloat texcoords[] = {
1.0, 0.0,
0.0, 0.0,
0.0, 1.0,
1.0, 1.0,
};
if (x_factor > -99.0) { /* x_factor == -100 => OSD */
if (videoDecoder &&
videoDecoder->pig_x > 0 && videoDecoder->pig_y > 0 &&
videoDecoder->pig_w > 0 && videoDecoder->pig_h > 0) {
/* these calculations even consider cropping and panscan mode
* maybe this could be done with some clever opengl tricks? */
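/* map the pig_x/pig_y/pig_w/pig_h window from framebuffer pixel coordinates
 * into GL's [-1,1] coordinate space, compensating for the zoom/x_factor
 * scaling applied via glScalef() below */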
double w2 = (double)mState.width * 0.5l;
double h2 = (double)mState.height * 0.5l;
double x = (double)(videoDecoder->pig_x - w2) / w2 / x_factor / size;
double y = (double)(h2 - videoDecoder->pig_y) / h2 / size;
double w = (double)videoDecoder->pig_w / w2;
double h = (double)videoDecoder->pig_h / h2;
x += ((1.0l - x_factor * size) / 2.0l) * w / x_factor / size;
y += ((size - 1.0l) / 2.0l) * h / size;
vertices[0] = x + w; /* top right x */
vertices[1] = y; /* top right y */
vertices[2] = x; /* top left x */
vertices[3] = y; /* top left y */
vertices[4] = x; /* bottom left x */
vertices[5] = y - h; /* bottom left y */
vertices[6] = vertices[0]; /* bottom right x */
vertices[7] = vertices[5]; /* bottom right y */
}
} else
x_factor = 1.0; /* OSD */
glPushMatrix();
glScalef(size * x_factor, size, size);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texcoords);
glDrawElements(GL_QUADS, 4, GL_UNSIGNED_BYTE, indices);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glPopMatrix();
}
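/*
 * Note on the PiG (picture-in-graphics) path in drawSquare() above, as a rough
 * sanity check of the coordinate math (the numbers are only an illustrative example):
 * with a 1280x720 OSD, size = 1, x_factor = 1 and a PiG window at (320,180) of
 * 640x360 pixels, w2 = 640 and h2 = 360, so x = (320-640)/640 = -0.5,
 * y = (360-180)/360 = 0.5, w = 1.0, h = 1.0 and both correction terms are zero.
 * The quad then spans (-0.5,-0.5)..(0.5,0.5) in GL coordinates, i.e. the centered
 * quarter of the [-1,1] viewport, which matches the requested pixel rectangle.
 */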
void GLFbPC::bltOSDBuffer()
{
/* FIXME: copy each time */
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, mState.pbo);
glBufferData(GL_PIXEL_UNPACK_BUFFER, osd_buf->size(), osd_buf->data(), GL_STREAM_DRAW_ARB);
glBindTexture(GL_TEXTURE_2D, mState.osdtex);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, mState.width, mState.height, GL_BGRA, GL_UNSIGNED_BYTE, 0);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
}
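/*
 * Note on bltOSDBuffer() above: glBufferData() with GL_STREAM_DRAW re-specifies
 * (and thereby orphans) the PBO storage and uploads the OSD bytes into it; with a
 * buffer bound to GL_PIXEL_UNPACK_BUFFER, the last argument of glTexSubImage2D()
 * is interpreted as an offset into that PBO (here 0) instead of a client pointer.
 */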
void GLFbPC::bltDisplayBuffer()
{
if (!videoDecoder) /* cannot start yet */
return;
static bool warn = true;
cVideo::SWFramebuffer *buf = videoDecoder->getDecBuf();
if (!buf) {
if (warn)
lt_info("GLFB::%s did not get a buffer...\n", __func__);
warn = false;
return;
}
warn = true;
int w = buf->width(), h = buf->height();
if (w == 0 || h == 0)
return;
AVRational a = buf->AR();
if (a.den != 0 && a.num != 0 && av_cmp_q(a, _mVA)) {
_mVA = a;
/* _mVA is the raw buffer's aspect, mVA is the real scaled output aspect */
av_reduce(&mVA.num, &mVA.den, w * a.num, h * a.den, INT_MAX);
mVAchanged = true;
}
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, mState.displaypbo);
glBufferData(GL_PIXEL_UNPACK_BUFFER, buf->size(), &(*buf)[0], GL_STREAM_DRAW_ARB);
glBindTexture(GL_TEXTURE_2D, mState.displaytex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_BGRA, GL_UNSIGNED_BYTE, 0);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
/* "rate control" mechanism starts here...
* this implementation is pretty naive and not working too well, but
* better this than nothing... :-) */
int64_t apts = 0;
/* 18000 is the magic value for A/V sync in my libao->pulseaudio->intel_hda setup */
int64_t vpts = buf->pts() + 18000;
if (audioDecoder)
apts = audioDecoder->getPts();
if (apts != last_apts) {
int rate, dummy1, dummy2;
if (apts < vpts)
sleep_us = (sleep_us * 2 + (vpts - apts)*10/9) / 3;
else if (sleep_us > 1000)
sleep_us -= 1000;
last_apts = apts;
videoDecoder->getPictureInfo(dummy1, dummy2, rate);
if (rate > 0)
rate = 2000000 / rate; /* limit to half the frame rate */
else
rate = 50000; /* minimum 20 fps */
if (sleep_us > rate)
sleep_us = rate;
else if (sleep_us < 1)
sleep_us = 1;
}
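/* Rough numbers for the smoothing above (illustrative only): PTS values are in
 * 90 kHz ticks, so (vpts - apts) * 10 / 9 is one tenth of the A/V gap expressed
 * in microseconds; e.g. a gap of 9000 ticks (100 ms) gives a target of 10000 us.
 * sleep_us moves one third of the way towards that target on every update and is
 * then clamped between 1 us and two frame periods. */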
lt_debug("vpts: 0x%" PRIx64 " apts: 0x%" PRIx64 " diff: %6.3f sleep_us %d buf %d\n",
buf->pts(), apts, (buf->pts() - apts)/90000.0, sleep_us, videoDecoder->buf_num);
}

126
libgeneric-pc/glfb_priv.h Normal file
View File

@@ -0,0 +1,126 @@
/*
Copyright 2010 Carsten Juttner <carjay@gmx.net>
Copyright 2012,2013,2016 Stefan Seyfried <seife@tuxboxcvs.slipkontur.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
********************************************************************
private stuff of the GLFB thread that is only used inside libstb-hal
and not exposed to the application.
*/
#ifndef __glfb_priv__
#define __glfb_priv__
#include <OpenThreads/Mutex>
#include <vector>
#include <map>
#if USE_OPENGL
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <GL/gl.h>
#include <linux/fb.h> /* for screeninfo etc. */
#endif
#if USE_CLUTTER
#include <clutter/clutter.h>
#endif
#include "glfb.h"
extern "C" {
#include <libavutil/rational.h>
}
class GLFbPC
{
public:
GLFbPC(int x, int y, std::vector<unsigned char> &buf);
~GLFbPC();
std::vector<unsigned char> *getOSDBuffer() { return osd_buf; } /* pointer to OSD bounce buffer */
int getOSDWidth() { return mState.width; }
int getOSDHeight() { return mState.height; }
void blit() { mState.blit = true; };
fb_var_screeninfo getScreenInfo() { return si; }
void setOutputFormat(AVRational a, int h, int c) { mOA = a; *mY = h; mCrop = c; mReInit = true; }
/* just make everything public for simplicity - this is only used inside libstb-hal anyway
private:
*/
fb_var_screeninfo si;
int *mX;
int *mY;
int _mX[2]; /* output window size */
int _mY[2]; /* [0] = normal, [1] = fullscreen */
AVRational mOA; /* output window aspect ratio */
AVRational mVA; /* video aspect ratio */
AVRational _mVA; /* for detecting changes in mVA */
bool mVAchanged;
float zoom; /* for cropping */
float xscale; /* and aspect ratio */
int mCrop; /* DISPLAY_AR_MODE */
bool mFullscreen; /* fullscreen? */
bool mReInit; /* setup things for GL */
OpenThreads::Mutex mReInitLock;
bool mShutDown; /* if set main loop is left */
bool mInitDone; /* condition predicate */
// OpenThreads::Condition mInitCond; /* condition variable for init */
// mutable OpenThreads::Mutex mMutex; /* lock our data */
std::vector<unsigned char> *osd_buf; /* silly bounce buffer */
#if USE_OPENGL
std::map<unsigned char, int> mKeyMap;
std::map<int, int> mSpecialMap;
#endif
#if USE_CLUTTER
std::map<int, int> mKeyMap;
#endif
int input_fd;
int64_t last_apts;
void run();
static void rendercb(); /* callback for GLUT */
void render(); /* actual render function */
#if USE_OPENGL
static void keyboardcb(unsigned char key, int x, int y);
static void specialcb(int key, int x, int y);
static void resizecb(int w, int h);
void checkReinit(int w, int h); /* e.g. in case window was resized */
void setupGLObjects(); /* PBOs, textures and stuff */
void releaseGLObjects();
void drawSquare(float size, float x_factor = 1); /* do not be square */
#endif
#if USE_CLUTTER
static bool keyboardcb(ClutterActor *actor, ClutterEvent *event, gpointer user_data);
#endif
void initKeys(); /* setup key bindings for window */
#if 0
void setupCtx(); /* create the window and make the context current */
void setupOSDBuffer(); /* create the OSD buffer */
#endif
struct {
int width; /* width and height, fixed for a framebuffer instance */
int height;
bool blit;
#if USE_OPENGL
GLuint osdtex; /* holds the OSD texture */
GLuint pbo; /* PBO we use for transfer to texture */
GLuint displaytex; /* holds the display texture */
GLuint displaypbo;
#endif
} mState;
void bltOSDBuffer();
void bltDisplayBuffer();
};
#endif

View File

@@ -0,0 +1,45 @@
/*
* determine the capabilities of the hardware.
* part of libstb-hal
*
* (C) 2010-2012,2016 Stefan Seyfried
*
* License: GPL v2 or later
*/
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <hardware_caps.h>
#include <sys/utsname.h>
static int initialized = 0;
static hw_caps_t caps;
hw_caps_t *get_hwcaps(void)
{
struct utsname u;
if (initialized)
return &caps;
memset(&caps, 0, sizeof(hw_caps_t));
initialized = 1;
caps.can_shutdown = 1; /* for testing */
caps.display_type = HW_DISPLAY_LINE_TEXT;
caps.has_HDMI = 1;
caps.display_xres = 8;
caps.display_can_deepstandby = 0;
caps.display_has_statusline = 0;
strcpy(caps.boxvendor, "Generic");
strcpy(caps.boxname, "PC");
if (! uname(&u))
strncpy(caps.boxarch, u.machine, sizeof(caps.boxarch));
else
fprintf(stderr, "%s: uname() failed: %m\n", __func__);
return &caps;
}

57
libgeneric-pc/init.cpp Normal file
View File

@@ -0,0 +1,57 @@
#include <cstring>
#include <cstdlib>
#include <unistd.h>
#include "init_td.h"
#include "glfb.h"
#include "lt_debug.h"
#define lt_debug(args...) _lt_debug(TRIPLE_DEBUG_INIT, NULL, args)
#define lt_info(args...) _lt_info(TRIPLE_DEBUG_INIT, NULL, args)
static bool initialized = false;
GLFramebuffer *glfb = NULL;
bool HAL_nodec = false;
void init_td_api()
{
if (!initialized)
lt_debug_init();
lt_info("%s begin, initialized=%d, debug=0x%02x\n", __func__, (int)initialized, debuglevel);
if (! glfb) {
int x = 1280, y = 720; /* default OSD FB resolution */
/*
* export GLFB_RESOLUTION=720,576
* to restore the old default behaviour
*/
const char *tmp = getenv("GLFB_RESOLUTION");
const char *p = NULL;
if (tmp)
p = strchr(tmp, ',');
if (p) {
x = atoi(tmp);
y = atoi(p + 1);
}
lt_info("%s: setting GL Framebuffer size to %dx%d\n", __func__, x, y);
if (!p)
lt_info("%s: export GLFB_RESOLUTION=\"<w>,<h>\" to set another resolution\n", __func__);
glfb = new GLFramebuffer(x, y); /* defaults to 1280x720, can be overridden via GLFB_RESOLUTION */
}
/* allow disabling of Audio/video decoders in case we just want to
* valgrind-check other parts... export HAL_NOAVDEC=1 */
if (getenv("HAL_NOAVDEC"))
HAL_nodec = true;
/* hack: this ensures that the simple_display thread does not blit() once per second... */
setenv("SPARK_NOBLIT", "1", 1);
initialized = true;
}
void shutdown_td_api()
{
lt_info("%s, initialized = %d\n", __func__, (int)initialized);
if (glfb)
delete glfb;
glfb = NULL;
initialized = false;
}

150
libgeneric-pc/playback.cpp Normal file
View File

@@ -0,0 +1,150 @@
#include <stdio.h>
#include "playback_lib.h"
static const char * FILENAME = "playback-dummy";
bool cPlayback::Open(playmode_t)
{
return 0;
}
void cPlayback::Close(void)
{
}
bool cPlayback::Start(std::string filename, std::string headers)
{
return Start((char*) filename.c_str(),0,0,0,0,0, headers);
}
bool cPlayback::Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string headers)
{
printf("%s:%s - filename=%s vpid=%u vtype=%d apid=%u ac3=%d duration=%i\n",
FILENAME, __func__, filename, vpid, vtype, apid, ac3, duration);
return true;
}
bool cPlayback::SetAPid(int pid, bool /*ac3*/)
{
printf("%s:%s pid %i\n", FILENAME, __func__, pid);
return true;
}
bool cPlayback::SelectSubtitles(int pid, std::string charset)
{
printf("%s:%s pid %i, charset: %s\n", FILENAME, __func__, pid, charset.c_str());
return true;
}
bool cPlayback::SetSpeed(int speed)
{
printf("%s:%s playing %d speed %d\n", FILENAME, __func__, playing, speed);
return true;
}
bool cPlayback::GetSpeed(int &/*speed*/) const
{
return true;
}
bool cPlayback::GetPosition(int &position, int &duration)
{
printf("%s:%s %d %d\n", FILENAME, __func__, position, duration);
position = 0;
duration = 0;
return true;
}
bool cPlayback::SetPosition(int position, bool)
{
printf("%s:%s %d\n", FILENAME, __func__,position);
return true;
}
void cPlayback::FindAllPids(int *, unsigned int *, unsigned int *numpida, std::string *)
{
printf("%s:%s\n", FILENAME, __func__);
*numpida = 0;
}
void cPlayback::FindAllSubtitlePids(int * /*pids*/, unsigned int *numpids, std::string * /*language*/)
{
*numpids = 0;
}
bool cPlayback::SetSubtitlePid(int /*pid*/)
{
return true;
}
void cPlayback::GetPts(uint64_t &/*pts*/)
{
}
bool cPlayback::SetTeletextPid(int /*pid*/)
{
return true;
}
void cPlayback::FindAllTeletextsubtitlePids(int *, unsigned int *numpids, std::string *, int *, int *)
{
*numpids = 0;
}
void cPlayback::SuspendSubtitle(bool /*b*/)
{
}
void cPlayback::RequestAbort()
{
}
int cPlayback::GetTeletextPid(void)
{
return -1;
}
void cPlayback::FindAllSubs(int * /*pids*/, unsigned int * /*supp*/, unsigned int *num, std::string * /*lang*/)
{
printf("%s:%s\n", FILENAME, __func__);
*num = 0;
}
void cPlayback::GetChapters(std::vector<int> &positions, std::vector<std::string> &titles)
{
positions.clear();
titles.clear();
}
void cPlayback::GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values)
{
keys.clear();
values.clear();
}
void cPlayback::GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current)
{
playlists.clear();
titles.clear();
current = 0;
}
void cPlayback::SetTitle(int /*title*/)
{
}
uint64_t cPlayback::GetReadCount(void)
{
return 0;
}
cPlayback::cPlayback(int /*num*/)
{
printf("%s:%s\n", FILENAME, __func__);
}
cPlayback::~cPlayback()
{
printf("%s:%s\n", FILENAME, __func__);
}

View File

@@ -0,0 +1,107 @@
/*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
*/
#ifndef __PLAYBACK_CS_H
#define __PLAYBACK_CS_H
#include <string>
#include <stdint.h>
#include <vector>
#include <config.h>
typedef enum
{
STATE_STOP,
STATE_PLAY,
STATE_PAUSE,
STATE_FF,
STATE_REW,
STATE_SLOW
} playstate_t;
typedef enum
{
PLAYMODE_TS = 0,
PLAYMODE_FILE,
} playmode_t;
struct AVFormatContext;
class cPlayback
{
private:
bool playing, first;
bool decoders_closed;
int mSpeed;
int mAudioStream;
int init_jump;
public:
playstate_t playstate;
cPlayback(int);
bool Open(playmode_t PlayMode);
void Close(void);
bool Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string headers = "");
bool Start(std::string filename, std::string headers = "");
bool Play(void);
bool SyncAV(void);
bool Stop(void);
bool SetAPid(int pid, bool ac3);
bool SetSubtitlePid(int pid);
bool SetTeletextPid(int pid);
void trickSeek(int ratio);
bool SetSpeed(int speed);
bool SetSlow(int slow);
bool GetSpeed(int &speed) const;
bool GetPosition(int &position, int &duration);
void GetPts(uint64_t &pts);
int GetAPid(void);
int GetVPid(void);
int GetSubtitlePid(void);
bool SetPosition(int position, bool absolute = false);
void FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language);
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language) { FindAllPids((int*) apids, (unsigned int*) ac3flags, (unsigned int*) numpida, language); };
void FindAllSubtitlePids(int *pids, unsigned int *numpids, std::string *language);
void FindAllTeletextsubtitlePids(int *pids, unsigned int *numpidt, std::string *tlanguage, int *mags, int *pages);
void RequestAbort(void);
void FindAllSubs(int *pids, unsigned int *supported, unsigned int *numpida, std::string *language);
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language) { FindAllSubs((int*) pids, (unsigned int*) supported, (unsigned int*) numpida, language); };
bool SelectSubtitles(int pid, std::string charset = "");
void GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current);
void SetTitle(int title);
uint64_t GetReadCount(void);
void GetChapters(std::vector<int> &positions, std::vector<std::string> &titles);
void GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values);
AVFormatContext *GetAVFormatContext();
void ReleaseAVFormatContext();
std::string extra_headers;
std::string user_agent;
//
~cPlayback();
void getMeta();
};
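/*
 * Rough usage sketch (not part of this header, just an illustration of the
 * intended call order as far as it can be read from the interface; the file
 * path is made up):
 *
 *   cPlayback pb(0);
 *   pb.Open(PLAYMODE_FILE);
 *   if (pb.Start(std::string("/path/to/movie.ts"))) {
 *       int pos = 0, dur = 0;
 *       pb.GetPosition(pos, dur);   // both in milliseconds
 *       pb.SetSpeed(1);             // 0 = pause, 1 = play, >1 = fast forward
 *   }
 *   pb.Close();
 */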
#endif

View File

@@ -0,0 +1,833 @@
/*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
*/
#include <unistd.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/stat.h>
#include <pthread.h>
#include <syscall.h>
#include "dmx_lib.h"
#include "audio_lib.h"
#include "video_lib.h"
#include "glfb.h"
#include "playback_gst.h"
#include "lt_debug.h"
#define lt_debug(args...) _lt_debug(HAL_DEBUG_PLAYBACK, this, args)
#define lt_info(args...) _lt_info(HAL_DEBUG_PLAYBACK, this, args)
#define lt_debug_c(args...) _lt_debug(HAL_DEBUG_PLAYBACK, NULL, args)
#define lt_info_c(args...) _lt_info(HAL_DEBUG_PLAYBACK, NULL, args)
static const char * FILENAME = "[playback.cpp]";
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
#include <gst/interfaces/xoverlay.h>
typedef enum
{
GST_PLAY_FLAG_VIDEO = 0x00000001,
GST_PLAY_FLAG_AUDIO = 0x00000002,
GST_PLAY_FLAG_TEXT = 0x00000004,
GST_PLAY_FLAG_VIS = 0x00000008,
GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010,
GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020,
GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040,
GST_PLAY_FLAG_DOWNLOAD = 0x00000080,
GST_PLAY_FLAG_BUFFERING = 0x000000100
} GstPlayFlags;
GstElement * m_gst_playbin = NULL;
GstElement * audioSink = NULL;
GstElement * videoSink = NULL;
gchar * uri = NULL;
GstTagList * m_stream_tags = 0;
static int end_eof = 0;
extern GLFramebuffer *glfb;
gint match_sinktype(GstElement *element, gpointer type)
{
return strcmp(g_type_name(G_OBJECT_TYPE(element)), (const char*)type);
}
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
{
gchar * sourceName;
// source
GstObject * source;
source = GST_MESSAGE_SRC(msg);
if (!GST_IS_OBJECT(source))
return GST_BUS_DROP;
sourceName = gst_object_get_name(source);
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
{
g_message("End-of-stream");
end_eof = 1;
break;
}
case GST_MESSAGE_ERROR:
{
gchar * debug;
GError *err;
gst_message_parse_error(msg, &err, &debug);
g_free (debug);
lt_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
if ( err->domain == GST_STREAM_ERROR )
{
if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
{
if ( g_strrstr(sourceName, "videosink") )
lt_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
else if ( g_strrstr(sourceName, "audiosink") )
lt_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
}
}
g_error_free(err);
end_eof = 1; // NOTE: just to exit
break;
}
case GST_MESSAGE_INFO:
{
gchar *debug;
GError *inf;
gst_message_parse_info (msg, &inf, &debug);
g_free (debug);
if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
{
if ( g_strrstr(sourceName, "videosink") )
lt_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
}
g_error_free(inf);
break;
}
case GST_MESSAGE_TAG:
{
GstTagList *tags, *result;
gst_message_parse_tag(msg, &tags);
result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
if (result)
{
if (m_stream_tags)
gst_tag_list_free(m_stream_tags);
m_stream_tags = result;
}
const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
if ( gv_image )
{
GstBuffer *buf_image;
buf_image = gst_value_get_buffer (gv_image);
int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
if(fd >= 0)
{
int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
close(fd);
lt_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
}
//FIXME: how shall playback handle this event???
}
gst_tag_list_free(tags);
lt_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
break;
}
case GST_MESSAGE_STATE_CHANGED:
{
if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
break;
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
if(old_state == new_state)
break;
lt_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
switch(transition)
{
case GST_STATE_CHANGE_NULL_TO_READY:
{
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
GstIterator *children;
if (audioSink)
{
gst_object_unref(GST_OBJECT(audioSink));
audioSink = NULL;
}
if (videoSink)
{
gst_object_unref(GST_OBJECT(videoSink));
videoSink = NULL;
}
children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
gst_iterator_free(children);
} break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
{
} break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
{
} break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
{
if (audioSink)
{
gst_object_unref(GST_OBJECT(audioSink));
audioSink = NULL;
}
if (videoSink)
{
gst_object_unref(GST_OBJECT(videoSink));
videoSink = NULL;
}
} break;
case GST_STATE_CHANGE_READY_TO_NULL:
{
} break;
}
break;
}
#if 0
case GST_MESSAGE_ELEMENT:
{
if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id"))
{
// set window id
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
// reshape window
gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
// sync frames
gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
}
}
#endif
break;
default:
break;
}
return GST_BUS_DROP;
}
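/*
 * Note on Gst_bus_call() above: it is installed with gst_bus_set_sync_handler(),
 * so it runs synchronously in the thread that posted the message, and returning
 * GST_BUS_DROP means the message is not queued on the bus afterwards.
 */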
cPlayback::cPlayback(int num)
{
lt_info( "%s:%s\n", FILENAME, __FUNCTION__);
const gchar *nano_str;
guint major, minor, micro, nano;
gst_init(NULL, NULL);
gst_version (&major, &minor, &micro, &nano);
if (nano == 1)
nano_str = "(CVS)";
else if (nano == 2)
nano_str = "(Prerelease)";
else
nano_str = "";
lt_info( "%s:%s - This program is linked against GStreamer %d.%d.%d %s\n",
FILENAME, __FUNCTION__,
major, minor, micro, nano_str);
mAudioStream = 0;
mSpeed = 0;
playing = false;
playstate = STATE_STOP;
}
cPlayback::~cPlayback()
{
lt_info( "%s:%s\n", FILENAME, __FUNCTION__);
//FIXME: all cleanup is done in Close()
}
//Used by Fileplay
bool cPlayback::Open(playmode_t PlayMode)
{
lt_info("%s: PlayMode %d\n", __func__, PlayMode);
return true;
}
// used by movieplay
void cPlayback::Close(void)
{
lt_info( "%s:%s\n", FILENAME, __FUNCTION__);
Stop();
// disconnect bus handler
if (m_gst_playbin)
{
// disconnect sync handler callback
GstBus * bus = gst_pipeline_get_bus(GST_PIPELINE (m_gst_playbin));
gst_bus_set_sync_handler(bus, NULL, NULL);
gst_object_unref(bus);
lt_info( "%s:%s - GST bus handler closed\n", FILENAME, __FUNCTION__);
}
if (m_stream_tags)
gst_tag_list_free(m_stream_tags);
// close gst
if (m_gst_playbin)
{
if (audioSink)
{
gst_object_unref(GST_OBJECT(audioSink));
audioSink = NULL;
lt_info( "%s:%s - GST audio Sink closed\n", FILENAME, __FUNCTION__);
}
if (videoSink)
{
gst_object_unref(GST_OBJECT(videoSink));
videoSink = NULL;
lt_info( "%s:%s - GST video Sink closed\n", FILENAME, __FUNCTION__);
}
// unref m_gst_playbin
gst_object_unref (GST_OBJECT (m_gst_playbin));
lt_info( "%s:%s - GST playbin closed\n", FILENAME, __FUNCTION__);
m_gst_playbin = NULL;
}
}
// start
bool cPlayback::Start(std::string filename, std::string headers)
{
return Start((char*) filename.c_str(),0,0,0,0,0, headers);
}
bool cPlayback::Start(char *filename, int /*vpid*/, int /*vtype*/, int /*apid*/, int /*ac3*/, int /*duration*/, std::string headers)
{
lt_info( "%s:%s\n", FILENAME, __FUNCTION__);
mAudioStream = 0;
//create playback path
char file[400] = {""};
bool isHTTP = false;
if(!strncmp("http://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("file://", filename, 7))
{
isHTTP = false;
}
else if(!strncmp("upnp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("rtmp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("rtsp://", filename, 7))
{
isHTTP = true;
}
else if(!strncmp("mms://", filename, 6))
{
isHTTP = true;
}
else
strcat(file, "file://");
strcat(file, filename);
if (isHTTP)
uri = g_uri_escape_string(filename, G_URI_RESERVED_CHARS_GENERIC_DELIMITERS, true);
else
uri = g_filename_to_uri(filename, NULL, NULL);
lt_info("%s:%s - filename=%s\n", FILENAME, __FUNCTION__, filename);
// create gst pipeline
m_gst_playbin = gst_element_factory_make("playbin2", "playbin");
if(m_gst_playbin)
{
lt_info("%s:%s - m_gst_playbin\n", FILENAME, __FUNCTION__);
guint flags;
g_object_get(G_OBJECT (m_gst_playbin), "flags", &flags, NULL);
/* avoid video conversion, let the (hardware) sinks handle that */
flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
/* volume control is done by hardware */
flags &= ~GST_PLAY_FLAG_SOFT_VOLUME;
g_object_set(G_OBJECT (m_gst_playbin), "uri", uri, NULL);
g_object_set(G_OBJECT (m_gst_playbin), "flags", flags, NULL);
//gstbus handler
GstBus * bus = gst_pipeline_get_bus( GST_PIPELINE(m_gst_playbin) );
gst_bus_set_sync_handler(bus, Gst_bus_call, NULL);
gst_object_unref(bus);
// state playing
gst_element_set_state(GST_ELEMENT(m_gst_playbin), GST_STATE_PLAYING);
playing = true;
playstate = STATE_PLAY;
}
else
{
lt_info("%s:%s - failed to create GStreamer pipeline!, sorry we can not play\n", FILENAME, __FUNCTION__);
playing = false;
return false;
}
g_free(uri);
// set buffer size
/* increase the default 2 second / 2 MB buffer limitations to 5s / 5MB */
int m_buffer_size = 5*1024*1024;
//g_object_set(G_OBJECT(m_gst_playbin), "buffer-duration", 5LL * GST_SECOND, NULL);
g_object_set(G_OBJECT(m_gst_playbin), "buffer-size", m_buffer_size, NULL);
return true;
}
bool cPlayback::Play(void)
{
lt_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == true)
return true;
if(m_gst_playbin)
{
gst_element_set_state(GST_ELEMENT(m_gst_playbin), GST_STATE_PLAYING);
playing = true;
playstate = STATE_PLAY;
}
lt_info("%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
return playing;
}
bool cPlayback::Stop(void)
{
if(playing == false)
return false;
lt_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
// stop
if(m_gst_playbin)
{
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
}
playing = false;
lt_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
playstate = STATE_STOP;
return true;
}
bool cPlayback::SetAPid(int pid , bool /*ac3*/)
{
lt_info("%s: pid %i\n", __func__, pid);
int current_audio;
if(pid != mAudioStream)
{
g_object_set (G_OBJECT (m_gst_playbin), "current-audio", pid, NULL);
printf("%s: switched to audio stream %i\n", __FUNCTION__, pid);
mAudioStream = pid;
}
return true;
}
void cPlayback::trickSeek(int ratio)
{
bool validposition = false;
gint64 pos = 0;
int position;
int duration;
if( GetPosition(position, duration) )
{
validposition = true;
pos = position;
}
gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
if (validposition)
{
if(ratio >= 0.0)
gst_element_seek(m_gst_playbin, ratio, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP), GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, -1);
else
gst_element_seek(m_gst_playbin, ratio, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP), GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos);
}
}
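/*
 * Note on trickSeek() above: gst_element_seek() takes the playback rate as its
 * second argument. For rate >= 0 the current position is used as the segment
 * start; for negative rates the segment is set to [0, pos] so playback runs
 * backwards from the current position, which is the usual GStreamer convention.
 */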
bool cPlayback::SetSpeed(int speed)
{
lt_info( "%s:%s speed %d\n", FILENAME, __FUNCTION__, speed);
if(playing == false)
return false;
if(m_gst_playbin)
{
// pause
if(speed == 0)
{
gst_element_set_state(m_gst_playbin, GST_STATE_PAUSED);
//trickSeek(0);
playstate = STATE_PAUSE;
}
// play/continue
else if(speed == 1)
{
trickSeek(1);
//gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
//
playstate = STATE_PLAY;
}
//ff
else if(speed > 1)
{
trickSeek(speed);
//
playstate = STATE_FF;
}
//rf
else if(speed < 0)
{
trickSeek(speed);
//
playstate = STATE_REW;
}
}
mSpeed = speed;
return true;
}
bool cPlayback::SetSlow(int slow)
{
lt_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == false)
return false;
if(m_gst_playbin)
{
trickSeek(0.5);
}
playstate = STATE_SLOW;
mSpeed = slow;
return true;
}
bool cPlayback::GetSpeed(int &speed) const
{
speed = mSpeed;
return true;
}
// in milliseconds
bool cPlayback::GetPosition(int &position, int &duration)
{
if(playing == false)
return false;
//EOF
if(end_eof)
{
end_eof = 0;
return false;
}
if(m_gst_playbin)
{
//position
GstFormat fmt = GST_FORMAT_TIME; //Returns time in nanosecs
gint64 pts = 0;
unsigned long long int sec = 0;
gst_element_query_position(m_gst_playbin, &fmt, &pts);
position = pts / 1000000.0;
// duration
GstFormat fmt_d = GST_FORMAT_TIME; //Returns time in nanosecs
double length = 0;
gint64 len;
gst_element_query_duration(m_gst_playbin, &fmt_d, &len);
length = len / 1000000.0;
if(length < 0)
length = 0;
duration = (int)(length);
}
return true;
}
bool cPlayback::SetPosition(int position, bool absolute)
{
lt_info("%s: pos %d abs %d playing %d\n", __func__, position, absolute, playing);
if(playing == false)
return false;
gint64 time_nanoseconds;
gint64 pos;
GstFormat fmt = GST_FORMAT_TIME;
if(m_gst_playbin)
{
gst_element_query_position(m_gst_playbin, &fmt, &pos);
time_nanoseconds = pos + (position * 1000000.0);
if(time_nanoseconds < 0)
time_nanoseconds = 0;
gst_element_seek(m_gst_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, time_nanoseconds, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
}
return true;
}
void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string * language)
{
lt_info( "%s:%s\n", FILENAME, __FUNCTION__);
if(m_gst_playbin)
{
gint i, n_audio = 0;
//GstStructure * structure = NULL;
// get audio
g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
printf("%s: %d audio\n", __FUNCTION__, n_audio);
if(n_audio == 0)
return;
for (i = 0; i < n_audio; i++)
{
// apids
apids[i]=i;
GstPad * pad = 0;
g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
GstCaps * caps = gst_pad_get_negotiated_caps(pad);
if (!caps)
continue;
GstStructure * structure = gst_caps_get_structure(caps, 0);
//const gchar *g_type = gst_structure_get_name(structure);
//if (!structure)
//return atUnknown;
//ac3flags[0] = 0;
// ac3flags
if ( gst_structure_has_name (structure, "audio/mpeg"))
{
gint mpegversion, layer = -1;
if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
mpegversion = 0; /* treat as unknown */
switch (mpegversion)
{
case 1:
/*
{
gst_structure_get_int (structure, "layer", &layer);
if ( layer == 3 )
return atMP3;
else
return atMPEG;
ac3flags[0] = 4;
break;
}
*/
ac3flags[i] = 4;
break;
case 2:
//return atAAC;
ac3flags[i] = 5;
break;
case 4:
//return atAAC;
ac3flags[i] = 5;
break;
default:
//return atUnknown;
ac3flags[i] = 0;
break;
}
}
else if ( gst_structure_has_name (structure, "audio/x-ac3") || gst_structure_has_name (structure, "audio/ac3") )
//return atAC3;
ac3flags[i] = 1;
else if ( gst_structure_has_name (structure, "audio/x-dts") || gst_structure_has_name (structure, "audio/dts") )
//return atDTS;
ac3flags[i] = 6;
else if ( gst_structure_has_name (structure, "audio/x-raw-int") )
//return atPCM;
ac3flags[i] = 0;
gst_caps_unref(caps);
}
// numpids
*numpida=i;
}
}
void cPlayback::getMeta()
{
if(playing)
return;
}
bool cPlayback::SyncAV(void)
{
lt_info( "%s:%s playing %d\n", FILENAME, __FUNCTION__, playing);
if(playing == false )
return false;
return true;
}
void cPlayback::RequestAbort()
{
}
void cPlayback::FindAllSubs(int * /*pids*/, unsigned int * /*supp*/, unsigned int *num, std::string * /*lang*/)
{
printf("%s:%s\n", FILENAME, __func__);
*num = 0;
}
void cPlayback::GetChapters(std::vector<int> &positions, std::vector<std::string> &titles)
{
positions.clear();
titles.clear();
}
void cPlayback::GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current)
{
playlists.clear();
titles.clear();
current = 0;
}
void cPlayback::SetTitle(int /*title*/)
{
}
bool cPlayback::SelectSubtitles(int pid, std::string charset)
{
printf("%s:%s pid %i, charset: %s\n", FILENAME, __func__, pid, charset.c_str());
return true;
}
void cPlayback::GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values)
{
keys.clear();
values.clear();
}
void cPlayback::FindAllTeletextsubtitlePids(int *, unsigned int *numpids, std::string *, int *, int *)
{
*numpids = 0;
}
void cPlayback::FindAllSubtitlePids(int * /*pids*/, unsigned int *numpids, std::string * /*language*/)
{
*numpids = 0;
}
bool cPlayback::SetSubtitlePid(int /*pid*/)
{
return true;
}
void cPlayback::GetPts(uint64_t &/*pts*/)
{
}
bool cPlayback::SetTeletextPid(int /*pid*/)
{
return true;
}
uint64_t cPlayback::GetReadCount()
{
return 0;
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,60 @@
#ifndef __PLAYBACK_H
#define __PLAYBACK_H
#include <string>
#include <stdint.h>
#include <vector>
typedef enum {
PLAYMODE_TS = 0,
PLAYMODE_FILE,
} playmode_t;
struct AVFormatContext;
class cPlayback
{
private:
bool playing;
int mAudioStream;
int mSubtitleStream;
int mTeletextStream;
public:
cPlayback(int);
bool Open(playmode_t PlayMode);
void Close(void);
bool Start(char *filename, int vpid, int vtype, int apid, int ac3, int duration, std::string headers = "");
bool Start(std::string filename, std::string headers = "");
bool SetAPid(int pid, bool ac3);
bool SetSubtitlePid(int pid);
bool SetTeletextPid(int pid);
int GetAPid(void) { return mAudioStream; }
int GetVPid(void);
int GetSubtitlePid(void) { return mSubtitleStream; }
int GetTeletextPid(void);
void SuspendSubtitle(bool);
int GetFirstTeletextPid(void);
bool SetSpeed(int speed);
bool GetSpeed(int &speed) const;
bool GetPosition(int &position, int &duration);
void GetPts(uint64_t &pts);
bool SetPosition(int position, bool absolute = false);
void FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string *language);
void FindAllPids(uint16_t *apids, unsigned short *ac3flags, uint16_t *numpida, std::string *language) { FindAllPids((int*) apids, (unsigned int*) ac3flags, (unsigned int*) numpida, language); };
void FindAllSubtitlePids(int *pids, unsigned int *numpids, std::string *language);
void FindAllTeletextsubtitlePids(int *pids, unsigned int *numpidt, std::string *tlanguage, int *mags, int *pages);
void RequestAbort(void);
bool IsPlaying(void) { return false; }
uint64_t GetReadCount(void);
void FindAllSubs(int *pids, unsigned int *supported, unsigned int *numpida, std::string *language);
void FindAllSubs(uint16_t *pids, unsigned short *supported, uint16_t *numpida, std::string *language) { FindAllSubs((int*) pids, (unsigned int*) supported, (unsigned int*) numpida, language); };
bool SelectSubtitles(int pid, std::string charset = "");
void GetTitles(std::vector<int> &playlists, std::vector<std::string> &titles, int &current);
void SetTitle(int title);
void GetChapters(std::vector<int> &positions, std::vector<std::string> &titles);
void GetMetadata(std::vector<std::string> &keys, std::vector<std::string> &values);
//
~cPlayback();
AVFormatContext *GetAVFormatContext(){ return NULL; }
void ReleaseAVFormatContext() {}
};
#endif

1
libgeneric-pc/record.cpp Symbolic link
View File

@@ -0,0 +1 @@
../libspark/record.cpp

1
libgeneric-pc/record_lib.h Symbolic link
View File

@@ -0,0 +1 @@
../libspark/record_lib.h

788
libgeneric-pc/video.cpp Normal file
View File

@@ -0,0 +1,788 @@
/*
* (C) 2002-2003 Andreas Oberritter <obi@tuxbox.org>
* (C) 2010-2012 Stefan Seyfried
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, 51 Franklin Street, Suite 500 Boston, MA 02110-1335 USA
*
* cVideo implementation with decoder.
* uses ffmpeg <http://ffmpeg.org> for demuxing / decoding
* decoded frames are stored in SWFramebuffer class
*
* TODO: buffer handling surely needs some locking...
*/
#include "config.h"
#include <unistd.h>
#include <cstring>
#include <cstdio>
#include <cstdlib>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
/* ffmpeg buf 32k */
#define INBUF_SIZE 0x8000
/* my own buf 256k */
#define DMX_BUF_SZ 0x20000
#if USE_OPENGL
#define VDEC_PIXFMT AV_PIX_FMT_RGB32
#endif
#if USE_CLUTTER
#define VDEC_PIXFMT AV_PIX_FMT_BGR24
#endif
#include "video_lib.h"
#include "dmx_hal.h"
#include "glfb_priv.h"
#include "lt_debug.h"
#define lt_debug(args...) _lt_debug(TRIPLE_DEBUG_VIDEO, this, args)
#define lt_info(args...) _lt_info(TRIPLE_DEBUG_VIDEO, this, args)
#define lt_info_c(args...) _lt_info(TRIPLE_DEBUG_VIDEO, NULL, args)
cVideo *videoDecoder = NULL;
extern cDemux *videoDemux;
extern GLFbPC *glfb_priv;
int system_rev = 0;
extern bool HAL_nodec;
static uint8_t *dmxbuf;
static int bufpos;
static const AVRational aspect_ratios[6] = {
{ 1, 1 },
{ 4, 3 },
{ 14, 9 },
{ 16, 9 },
{ 20, 9 },
{ -1,-1 }
};
cVideo::cVideo(int, void *, void *, unsigned int)
{
lt_debug("%s\n", __func__);
av_register_all();
if (!HAL_nodec)
dmxbuf = (uint8_t *)malloc(DMX_BUF_SZ);
bufpos = 0;
thread_running = false;
w_h_changed = false;
dec_w = dec_h = 0;
buf_num = 0;
buf_in = 0;
buf_out = 0;
pig_x = pig_y = pig_w = pig_h = 0;
pig_changed = false;
display_aspect = DISPLAY_AR_16_9;
display_crop = DISPLAY_AR_MODE_LETTERBOX;
v_format = VIDEO_FORMAT_MPEG2;
output_h = 0;
stillpicture = false;
}
cVideo::~cVideo(void)
{
Stop();
/* ouch :-( */
videoDecoder = NULL;
}
int cVideo::setAspectRatio(int vformat, int cropping)
{
lt_info("%s(%d, %d)\n", __func__, vformat, cropping);
if (vformat >= 0)
display_aspect = (DISPLAY_AR) vformat;
if (cropping >= 0)
display_crop = (DISPLAY_AR_MODE) cropping;
if (display_aspect < DISPLAY_AR_RAW && output_h > 0) /* don't know what to do with this */
glfb_priv->setOutputFormat(aspect_ratios[display_aspect], output_h, display_crop);
return 0;
}
int cVideo::getAspectRatio(void)
{
buf_m.lock();
int ret = 0;
int w, h, ar;
AVRational a;
if (buf_num == 0)
goto out;
a = buffers[buf_out].AR();
w = buffers[buf_out].width();
h = buffers[buf_out].height();
if (a.den == 0 || h == 0)
goto out;
ar = w * 100 * a.num / h / a.den;
if (ar < 100 || ar > 225) /* < 4:3, > 20:9 */
; /* ret = 0: N/A */
else if (ar < 140) /* 4:3 */
ret = 1;
else if (ar < 165) /* 14:9 */
ret = 2;
else if (ar < 200) /* 16:9 */
ret = 3;
else
ret = 4; /* 20:9 */
out:
buf_m.unlock();
return ret;
}
int cVideo::setCroppingMode(int)
{
return 0;
}
int cVideo::Start(void *, unsigned short, unsigned short, void *)
{
lt_debug("%s running %d >\n", __func__, thread_running);
if (!thread_running && !HAL_nodec)
OpenThreads::Thread::start();
lt_debug("%s running %d <\n", __func__, thread_running);
return 0;
}
int cVideo::Stop(bool)
{
lt_debug("%s running %d >\n", __func__, thread_running);
if (thread_running) {
thread_running = false;
OpenThreads::Thread::join();
}
lt_debug("%s running %d <\n", __func__, thread_running);
return 0;
}
int cVideo::setBlank(int)
{
return 1;
}
int cVideo::GetVideoSystem()
{
int current_video_system = VIDEO_STD_1080I50;
if(dec_w < 720)
current_video_system = VIDEO_STD_PAL;
else if(dec_w > 720 && dec_w <= 1280)
current_video_system = VIDEO_STD_720P50;
return current_video_system;
}
int cVideo::SetVideoSystem(int system, bool)
{
int h;
switch(system)
{
case VIDEO_STD_NTSC:
case VIDEO_STD_480P:
h = 480;
break;
case VIDEO_STD_1080I60:
case VIDEO_STD_1080I50:
case VIDEO_STD_1080P30:
case VIDEO_STD_1080P24:
case VIDEO_STD_1080P25:
case VIDEO_STD_1080P50:
h = 1080;
break;
case VIDEO_STD_720P50:
case VIDEO_STD_720P60:
h = 720;
break;
case VIDEO_STD_AUTO:
lt_info("%s: VIDEO_STD_AUTO not implemented\n", __func__);
// fallthrough
case VIDEO_STD_SECAM:
case VIDEO_STD_PAL:
case VIDEO_STD_576P:
h = 576;
break;
default:
lt_info("%s: unhandled value %d\n", __func__, system);
return 0;
}
// v_std = (VIDEO_STD) system;
output_h = h;
if (display_aspect < DISPLAY_AR_RAW && output_h > 0) /* don't know what to do with this */
glfb_priv->setOutputFormat(aspect_ratios[display_aspect], output_h, display_crop);
return 0;
}
int cVideo::getPlayState(void)
{
return VIDEO_PLAYING;
}
void cVideo::SetVideoMode(analog_mode_t)
{
}
void cVideo::ShowPicture(const char *fname)
{
lt_info("%s(%s)\n", __func__, fname);
if (access(fname, R_OK))
return;
still_m.lock();
stillpicture = true;
buf_num = 0;
buf_in = 0;
buf_out = 0;
still_m.unlock();
unsigned int i;
int stream_id = -1;
int got_frame = 0;
int len;
AVFormatContext *avfc = NULL;
AVCodecContext *c = NULL;
AVCodecParameters *p = NULL;
AVCodec *codec;
AVFrame *frame, *rgbframe;
AVPacket avpkt;
if (avformat_open_input(&avfc, fname, NULL, NULL) < 0) {
lt_info("%s: Could not open file %s\n", __func__, fname);
return;
}
if (avformat_find_stream_info(avfc, NULL) < 0) {
lt_info("%s: Could not find file info %s\n", __func__, fname);
goto out_close;
}
for (i = 0; i < avfc->nb_streams; i++) {
if (avfc->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
stream_id = i;
break;
}
}
if (stream_id < 0)
goto out_close;
p = avfc->streams[stream_id]->codecpar;
codec = avcodec_find_decoder(p->codec_id);
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
lt_info("%s: Could not find/open the codec, id 0x%x\n", __func__, p->codec_id);
goto out_close;
}
frame = av_frame_alloc();
rgbframe = av_frame_alloc();
if (!frame || !rgbframe) {
lt_info("%s: Could not allocate video frame\n", __func__);
goto out_free;
}
av_init_packet(&avpkt);
if (av_read_frame(avfc, &avpkt) < 0) {
lt_info("%s: av_read_frame < 0\n", __func__);
goto out_free;
}
len = avcodec_decode_video2(c, frame, &got_frame, &avpkt);
if (len < 0) {
lt_info("%s: avcodec_decode_video2 %d\n", __func__, len);
av_packet_unref(&avpkt);
goto out_free;
}
if (avpkt.size > len)
lt_info("%s: WARN: pkt->size %d != len %d\n", __func__, avpkt.size, len);
if (got_frame) {
unsigned int need = av_image_get_buffer_size(VDEC_PIXFMT, c->width, c->height, 1);
struct SwsContext *convert = sws_getContext(c->width, c->height, c->pix_fmt,
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
if (!convert)
lt_info("%s: ERROR setting up SWS context\n", __func__);
else {
buf_m.lock();
SWFramebuffer *f = &buffers[buf_in];
if (f->size() < need)
f->resize(need);
av_image_fill_arrays(rgbframe->data, rgbframe->linesize, &(*f)[0], VDEC_PIXFMT,
c->width, c->height, 1);
sws_scale(convert, frame->data, frame->linesize, 0, c->height,
rgbframe->data, rgbframe->linesize);
sws_freeContext(convert);
f->width(c->width);
f->height(c->height);
f->pts(AV_NOPTS_VALUE);
AVRational a = av_guess_sample_aspect_ratio(avfc, avfc->streams[stream_id], frame);
f->AR(a);
buf_in++;
buf_in %= VDEC_MAXBUFS;
buf_num++;
if (buf_num > (VDEC_MAXBUFS - 1)) {
lt_debug("%s: buf_num overflow\n", __func__);
buf_out++;
buf_out %= VDEC_MAXBUFS;
buf_num--;
}
buf_m.unlock();
}
}
av_packet_unref(&avpkt);
out_free:
avcodec_close(c);
av_free(c);
av_frame_free(&frame);
av_frame_free(&rgbframe);
out_close:
avformat_close_input(&avfc);
lt_debug("%s(%s) end\n", __func__, fname);
}
void cVideo::StopPicture()
{
lt_info("%s\n", __func__);
still_m.lock();
stillpicture = false;
still_m.unlock();
}
void cVideo::Standby(unsigned int)
{
}
int cVideo::getBlank(void)
{
return 0;
}
void cVideo::Pig(int x, int y, int w, int h, int /*osd_w*/, int /*osd_h*/, int /*startx*/, int /*starty*/, int /*endx*/, int /*endy*/)
{
pig_x = x;
pig_y = y;
pig_w = w;
pig_h = h;
pig_changed = true;
}
void cVideo::getPictureInfo(int &width, int &height, int &rate)
{
width = dec_w;
height = dec_h;
switch (dec_r) {
case 23://23.976fps
rate = VIDEO_FRAME_RATE_23_976;
break;
case 24:
rate = VIDEO_FRAME_RATE_24;
break;
case 25:
rate = VIDEO_FRAME_RATE_25;
break;
case 29://29.97fps
rate = VIDEO_FRAME_RATE_29_97;
break;
case 30:
rate = VIDEO_FRAME_RATE_30;
break;
case 50:
rate = VIDEO_FRAME_RATE_50;
break;
case 60:
rate = VIDEO_FRAME_RATE_60;
break;
default:
rate = dec_r;
break;
}
}
void cVideo::SetSyncMode(AVSYNC_TYPE)
{
};
int cVideo::SetStreamType(VIDEO_FORMAT v)
{
v_format = v;
return 0;
}
cVideo::SWFramebuffer *cVideo::getDecBuf(void)
{
buf_m.lock();
if (buf_num == 0) {
buf_m.unlock();
return NULL;
}
SWFramebuffer *p = &buffers[buf_out];
buf_out++;
buf_num--;
buf_out %= VDEC_MAXBUFS;
buf_m.unlock();
return p;
}
static int my_read(void *, uint8_t *buf, int buf_size)
{
int tmp = 0;
if (videoDecoder && bufpos < DMX_BUF_SZ - 4096) {
while (bufpos < buf_size && ++tmp < 20) { /* retry max 20 times */
int ret = videoDemux->Read(dmxbuf + bufpos, DMX_BUF_SZ - bufpos, 20);
if (ret > 0)
bufpos += ret;
}
}
if (bufpos == 0)
return 0;
if (bufpos > buf_size) {
memcpy(buf, dmxbuf, buf_size);
memmove(dmxbuf, dmxbuf + buf_size, bufpos - buf_size);
bufpos -= buf_size;
return buf_size;
}
memcpy(buf, dmxbuf, bufpos);
tmp = bufpos;
bufpos = 0;
return tmp;
}
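/*
 * Note on my_read() above: it refills dmxbuf from the demux (up to 20 short reads
 * of 20 ms each), hands libavformat at most buf_size bytes and moves any leftover
 * bytes to the front of dmxbuf, so the next call continues where this one stopped.
 */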
void cVideo::run(void)
{
lt_info("====================== start decoder thread ================================\n");
AVCodec *codec;
AVCodecParameters *p = NULL;
AVCodecContext *c= NULL;
AVFormatContext *avfc = NULL;
AVInputFormat *inp;
AVFrame *frame, *rgbframe;
uint8_t *inbuf = (uint8_t *)av_malloc(INBUF_SIZE);
AVPacket avpkt;
struct SwsContext *convert = NULL;
time_t warn_r = 0; /* last read error */
time_t warn_d = 0; /* last decode error */
bufpos = 0;
buf_num = 0;
buf_in = 0;
buf_out = 0;
dec_r = 0;
av_init_packet(&avpkt);
inp = av_find_input_format("mpegts");
AVIOContext *pIOCtx = avio_alloc_context(inbuf, INBUF_SIZE, // internal Buffer and its size
0, // bWriteable (1=true,0=false)
NULL, // user data; will be passed to our callback functions
my_read, // read callback
NULL, // write callback
NULL); // seek callback
avfc = avformat_alloc_context();
avfc->pb = pIOCtx;
avfc->iformat = inp;
avfc->probesize = 188*5;
thread_running = true;
if (avformat_open_input(&avfc, NULL, inp, NULL) < 0) {
lt_info("%s: Could not open input\n", __func__);
goto out;
}
while (avfc->nb_streams < 1)
{
lt_info("%s: nb_streams %d, should be 1 => retry\n", __func__, avfc->nb_streams);
if (av_read_frame(avfc, &avpkt) < 0)
lt_info("%s: av_read_frame < 0\n", __func__);
av_packet_unref(&avpkt);
if (! thread_running)
goto out;
}
p = avfc->streams[0]->codecpar;
if (p->codec_type != AVMEDIA_TYPE_VIDEO)
lt_info("%s: no video codec? 0x%x\n", __func__, p->codec_type);
codec = avcodec_find_decoder(p->codec_id);
if (!codec) {
lt_info("%s: Codec for %s not found\n", __func__, avcodec_get_name(p->codec_id));
goto out;
}
c = avcodec_alloc_context3(codec);
if (avcodec_open2(c, codec, NULL) < 0) {
lt_info("%s: Could not open codec\n", __func__);
goto out;
}
frame = av_frame_alloc();
rgbframe = av_frame_alloc();
if (!frame || !rgbframe) {
lt_info("%s: Could not allocate video frame\n", __func__);
goto out2;
}
lt_info("decoding %s\n", avcodec_get_name(c->codec_id));
while (thread_running) {
if (av_read_frame(avfc, &avpkt) < 0) {
if (time(NULL) - warn_r > 4) {
lt_info("%s: av_read_frame < 0\n", __func__);
warn_r = time(NULL);
}
usleep(10000);
continue;
}
int got_frame = 0;
int len = avcodec_decode_video2(c, frame, &got_frame, &avpkt);
if (len < 0) {
if (time(NULL) - warn_d > 4) {
lt_info("%s: avcodec_decode_video2 %d\n", __func__, len);
warn_d = time(NULL);
}
av_packet_unref(&avpkt);
continue;
}
if (avpkt.size > len)
lt_info("%s: WARN: pkt->size %d != len %d\n", __func__, avpkt.size, len);
still_m.lock();
if (got_frame && ! stillpicture) {
unsigned int need = av_image_get_buffer_size(VDEC_PIXFMT, c->width, c->height, 1);
convert = sws_getCachedContext(convert,
c->width, c->height, c->pix_fmt,
c->width, c->height, VDEC_PIXFMT,
SWS_BICUBIC, 0, 0, 0);
if (!convert)
lt_info("%s: ERROR setting up SWS context\n", __func__);
else {
buf_m.lock();
SWFramebuffer *f = &buffers[buf_in];
if (f->size() < need)
f->resize(need);
av_image_fill_arrays(rgbframe->data, rgbframe->linesize, &(*f)[0], VDEC_PIXFMT,
c->width, c->height, 1);
sws_scale(convert, frame->data, frame->linesize, 0, c->height,
rgbframe->data, rgbframe->linesize);
if (dec_w != c->width || dec_h != c->height) {
lt_info("%s: pic changed %dx%d -> %dx%d\n", __func__,
dec_w, dec_h, c->width, c->height);
dec_w = c->width;
dec_h = c->height;
w_h_changed = true;
}
f->width(c->width);
f->height(c->height);
int64_t vpts = av_frame_get_best_effort_timestamp(frame);
/* a/v delay determined experimentally :-) */
#if USE_OPENGL
if (v_format == VIDEO_FORMAT_MPEG2)
vpts += 90000*4/10; /* 400ms */
else
vpts += 90000*3/10; /* 300ms */
#endif
#if USE_CLUTTER
/* no idea why there's a difference between OpenGL and clutter rendering... */
if (v_format == VIDEO_FORMAT_MPEG2)
vpts += 90000*3/10; /* 300ms */
#endif
f->pts(vpts);
AVRational a = av_guess_sample_aspect_ratio(avfc, avfc->streams[0], frame);
f->AR(a);
buf_in++;
buf_in %= VDEC_MAXBUFS;
buf_num++;
if (buf_num > (VDEC_MAXBUFS - 1)) {
lt_debug("%s: buf_num overflow\n", __func__);
buf_out++;
buf_out %= VDEC_MAXBUFS;
buf_num--;
}
dec_r = c->time_base.den/(c->time_base.num * c->ticks_per_frame);
buf_m.unlock();
}
lt_debug("%s: time_base: %d/%d, ticks: %d rate: %d pts 0x%" PRIx64 "\n", __func__,
c->time_base.num, c->time_base.den, c->ticks_per_frame, dec_r,
av_frame_get_best_effort_timestamp(frame));
} else
lt_debug("%s: got_frame: %d stillpicture: %d\n", __func__, got_frame, stillpicture);
still_m.unlock();
av_packet_unref(&avpkt);
}
sws_freeContext(convert);
out2:
avcodec_close(c);
av_free(c);
av_frame_free(&frame);
av_frame_free(&rgbframe);
out:
avformat_close_input(&avfc);
av_free(pIOCtx->buffer);
av_free(pIOCtx);
/* reset output buffers */
bufpos = 0;
still_m.lock();
if (!stillpicture) {
buf_num = 0;
buf_in = 0;
buf_out = 0;
}
still_m.unlock();
lt_info("======================== end decoder thread ================================\n");
}
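/*
 * Note on the decoder thread above: the data path is
 * videoDemux->Read() -> my_read() -> custom AVIOContext -> mpegts demuxer ->
 * avcodec -> sws_scale to VDEC_PIXFMT, and each converted frame is stored in the
 * ring of VDEC_MAXBUFS SWFramebuffers that the GL thread drains via getDecBuf().
 */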
static bool swscale(unsigned char *src, unsigned char *dst, int sw, int sh, int dw, int dh, AVPixelFormat sfmt)
{
bool ret = false;
int len = 0;
struct SwsContext *scale = NULL;
scale = sws_getCachedContext(scale, sw, sh, sfmt, dw, dh, AV_PIX_FMT_RGB32, SWS_BICUBIC, 0, 0, 0);
if (!scale) {
lt_info_c("%s: ERROR setting up SWS context\n", __func__);
return ret;
}
AVFrame *sframe = av_frame_alloc();
AVFrame *dframe = av_frame_alloc();
if (sframe && dframe) {
len = av_image_fill_arrays(sframe->data, sframe->linesize, &(src)[0], sfmt, sw, sh, 1);
ret = (len > -1);
/* note the parentheses: assign the return value first, then compare, otherwise len only stores the result of the comparison */
if (ret && (len = av_image_fill_arrays(dframe->data, dframe->linesize, &(dst)[0], AV_PIX_FMT_RGB32, dw, dh, 1)) < 0)
ret = false;
if (ret && (len = sws_scale(scale, sframe->data, sframe->linesize, 0, sh, dframe->data, dframe->linesize)) < 0)
ret = false;
}else{
lt_info_c("%s: could not alloc sframe (%p) or dframe (%p)\n", __func__, sframe, dframe);
ret = false;
}
if(sframe){
av_frame_free(&sframe);
sframe = NULL;
}
if(dframe){
av_frame_free(&dframe);
dframe = NULL;
}
if(scale){
sws_freeContext(scale);
scale = NULL;
}
lt_info_c("%s: %s scale %ix%i to %ix%i ,len %i\n",ret?" ":"ERROR",__func__, sw, sh, dw, dh,len);
return ret;
}
bool cVideo::GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video, bool get_osd, bool scale_to_video)
{
lt_info("%s: data 0x%p xres %d yres %d vid %d osd %d scale %d\n",
__func__, data, xres, yres, get_video, get_osd, scale_to_video);
SWFramebuffer video;
std::vector<unsigned char> *osd = NULL;
std::vector<unsigned char> s_osd; /* scaled OSD */
int vid_w = 0, vid_h = 0;
int osd_w = glfb_priv->getOSDWidth();
int osd_h = glfb_priv->getOSDHeight();
xres = osd_w;
yres = osd_h;
if (get_video) {
buf_m.lock();
video = buffers[buf_out];
buf_m.unlock();
vid_w = video.width();
vid_h = video.height();
if (scale_to_video || !get_osd) {
xres = vid_w;
yres = vid_h;
AVRational a = video.AR();
/* TODO: this does not consider display_aspect and display_crop */
if (a.num > 0 && a.den > 0)
xres = vid_w * a.num / a.den;
}
}
if(video.empty()){
get_video=false;
xres = osd_w;
yres = osd_h;
}
if (get_osd)
osd = glfb_priv->getOSDBuffer();
unsigned int need = av_image_get_buffer_size(AV_PIX_FMT_RGB32, xres, yres, 1);
data = (unsigned char *)realloc(data, need); /* will be freed by caller */
if (data == NULL) /* out of memory? */
return false;
if (get_video) {
#if USE_OPENGL /* memcpy does not work when copying BGR24 to RGB32 */
if (vid_w != xres || vid_h != yres){ /* scale video into data... */
#endif
bool ret = swscale(&video[0], data, vid_w, vid_h, xres, yres,VDEC_PIXFMT);
if(!ret){
free(data);
return false;
}
#if USE_OPENGL /* memcpy does not work when copying BGR24 to RGB32 */
}else{ /* get_video and no fancy scaling needed */
memcpy(data, &video[0], xres * yres * sizeof(uint32_t));
}
#endif
}
if (get_osd && (osd_w != xres || osd_h != yres)) {
/* rescale osd */
s_osd.resize(need);
bool ret = swscale(&(*osd)[0], &s_osd[0], osd_w, osd_h, xres, yres,AV_PIX_FMT_RGB32);
if(!ret){
free(data);
return false;
}
osd = &s_osd;
}
if (get_video && get_osd) {
/* alpha blend osd onto data (video). TODO: maybe libavcodec can do this? */
uint32_t *d = (uint32_t *)data;
uint32_t *pixpos = (uint32_t *)&(*osd)[0];
for (int count = 0; count < yres; count++) {
for (int count2 = 0; count2 < xres; count2++ ) {
uint32_t pix = *pixpos;
if ((pix & 0xff000000) == 0xff000000)
*d = pix;
else {
uint8_t *in = (uint8_t *)(pixpos);
uint8_t *out = (uint8_t *)d;
int a = in[3]; /* TODO: big/little endian? */
*out = (*out + ((*in - *out) * a) / 256);
in++; out++;
*out = (*out + ((*in - *out) * a) / 256);
in++; out++;
*out = (*out + ((*in - *out) * a) / 256);
}
d++;
pixpos++;
}
}
}
else if (get_osd) /* only get_osd, data is not yet populated */
memcpy(data, &(*osd)[0], xres * yres * sizeof(uint32_t));
return true;
}
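/*
 * Note on the OSD blend in GetScreenImage() above: per channel it computes
 * out = out + (in - out) * a / 256, a plain source-over blend with 8-bit alpha.
 * Quick check with made-up values: out = 100, in = 200, a = 128 gives
 * 100 + (100 * 128) / 256 = 150, i.e. halfway between video and OSD as expected;
 * fully opaque OSD pixels (0xff alpha) are copied directly by the fast path.
 */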
int64_t cVideo::GetPTS(void)
{
int64_t pts = 0;
buf_m.lock();
if (buf_num != 0)
pts = buffers[buf_out].pts();
buf_m.unlock();
return pts;
}
void cVideo::SetDemux(cDemux *)
{
lt_debug("%s: not implemented yet\n", __func__);
}

227
libgeneric-pc/video_lib.h Normal file
View File

@@ -0,0 +1,227 @@
#ifndef _VIDEO_LIB_H
#define _VIDEO_LIB_H
#include <OpenThreads/Thread>
#include <OpenThreads/Mutex>
#include <vector>
#include <linux/dvb/video.h>
#include "cs_types.h"
#include "dmx_hal.h"
extern "C" {
#include <libavutil/rational.h>
}
typedef enum {
ANALOG_SD_RGB_CINCH = 0x00,
ANALOG_SD_YPRPB_CINCH,
ANALOG_HD_RGB_CINCH,
ANALOG_HD_YPRPB_CINCH,
ANALOG_SD_RGB_SCART = 0x10,
ANALOG_SD_YPRPB_SCART,
ANALOG_HD_RGB_SCART,
ANALOG_HD_YPRPB_SCART,
ANALOG_SCART_MASK = 0x10
} analog_mode_t;
typedef enum {
VIDEO_FORMAT_MPEG2 = 0,
VIDEO_FORMAT_MPEG4_H264,
VIDEO_FORMAT_VC1,
VIDEO_FORMAT_JPEG,
VIDEO_FORMAT_GIF,
VIDEO_FORMAT_PNG,
VIDEO_FORMAT_MPEG4_H265,
VIDEO_FORMAT_AVS = 16
} VIDEO_FORMAT;
typedef enum {
VIDEO_SD = 0,
VIDEO_HD,
VIDEO_120x60i,
VIDEO_320x240i,
VIDEO_1440x800i,
VIDEO_360x288i
} VIDEO_DEFINITION;
typedef enum {
VIDEO_FRAME_RATE_23_976 = 0,
VIDEO_FRAME_RATE_24,
VIDEO_FRAME_RATE_25,
VIDEO_FRAME_RATE_29_97,
VIDEO_FRAME_RATE_30,
VIDEO_FRAME_RATE_50,
VIDEO_FRAME_RATE_59_94,
VIDEO_FRAME_RATE_60
} VIDEO_FRAME_RATE;
typedef enum {
DISPLAY_AR_1_1,
DISPLAY_AR_4_3,
DISPLAY_AR_14_9,
DISPLAY_AR_16_9,
DISPLAY_AR_20_9,
DISPLAY_AR_RAW
} DISPLAY_AR;
typedef enum {
DISPLAY_AR_MODE_PANSCAN = 0,
DISPLAY_AR_MODE_LETTERBOX,
DISPLAY_AR_MODE_NONE,
DISPLAY_AR_MODE_PANSCAN2
} DISPLAY_AR_MODE;
typedef enum {
VIDEO_DB_DR_NEITHER = 0,
VIDEO_DB_ON,
VIDEO_DB_DR_BOTH
} VIDEO_DB_DR;
typedef enum {
VIDEO_PLAY_STILL = 0,
VIDEO_PLAY_CLIP,
VIDEO_PLAY_TRICK,
VIDEO_PLAY_MOTION,
VIDEO_PLAY_MOTION_NO_SYNC
} VIDEO_PLAY_MODE;
typedef enum {
VIDEO_STD_NTSC,
VIDEO_STD_SECAM,
VIDEO_STD_PAL,
VIDEO_STD_480P,
VIDEO_STD_576P,
VIDEO_STD_720P60,
VIDEO_STD_1080I60,
VIDEO_STD_720P50,
VIDEO_STD_1080I50,
VIDEO_STD_1080P30,
VIDEO_STD_1080P24,
VIDEO_STD_1080P25,
VIDEO_STD_AUTO,
VIDEO_STD_1080P50, /* SPARK only */
VIDEO_STD_MAX
} VIDEO_STD;
/* not used, for dummy functions */
typedef enum {
VIDEO_HDMI_CEC_MODE_OFF = 0,
VIDEO_HDMI_CEC_MODE_TUNER,
VIDEO_HDMI_CEC_MODE_RECORDER
} VIDEO_HDMI_CEC_MODE;
typedef enum
{
VIDEO_CONTROL_BRIGHTNESS = 0,
VIDEO_CONTROL_CONTRAST,
VIDEO_CONTROL_SATURATION,
VIDEO_CONTROL_HUE,
VIDEO_CONTROL_SHARPNESS,
VIDEO_CONTROL_MAX = VIDEO_CONTROL_SHARPNESS
} VIDEO_CONTROL;
#define VDEC_MAXBUFS 0x40
class cVideo : public OpenThreads::Thread
{
friend class GLFbPC;
friend class cDemux;
private:
/* called from GL thread */
class SWFramebuffer : public std::vector<unsigned char>
{
public:
SWFramebuffer() : mWidth(0), mHeight(0) {}
void width(int w) { mWidth = w; }
void height(int h) { mHeight = h; }
void pts(uint64_t p) { mPts = p; }
void AR(AVRational a) { mAR = a; }
int width() const { return mWidth; }
int height() const { return mHeight; }
int64_t pts() const { return mPts; }
AVRational AR() const { return mAR; }
private:
int mWidth;
int mHeight;
int64_t mPts;
AVRational mAR;
};
int buf_in, buf_out, buf_num;
int64_t GetPTS(void);
public:
/* constructor & destructor */
cVideo(int mode, void *, void *, unsigned int unit = 0);
~cVideo(void);
void * GetTVEnc() { return NULL; };
void * GetTVEncSD() { return NULL; };
/* aspect ratio */
int getAspectRatio(void);
int setAspectRatio(int aspect, int mode);
void getPictureInfo(int &width, int &height, int &rate);
/* cropping mode */
int setCroppingMode(int x = 0 /*vidDispMode_t x = VID_DISPMODE_NORM*/);
/* get play state */
int getPlayState(void);
/* blank on freeze */
int getBlank(void);
int setBlank(int enable);
/* set video_system */
int SetVideoSystem(int video_system, bool remember = true);
int GetVideoSystem();
/* change video play state. Parameters are all unused. */
int Start(void *PcrChannel = NULL, unsigned short PcrPid = 0, unsigned short VideoPid = 0, void *x = NULL);
int Stop(bool blank = true);
bool Pause(void);
int SetStreamType(VIDEO_FORMAT type);
void ShowPicture(const char * fname);
void SetSyncMode(AVSYNC_TYPE mode);
bool SetCECMode(VIDEO_HDMI_CEC_MODE) { return true; };
void SetCECAutoView(bool) { return; };
void SetCECAutoStandby(bool) { return; };
void StopPicture();
void Standby(unsigned int bOn);
void Pig(int x, int y, int w, int h, int osd_w = 1064, int osd_h = 600, int startx = 0, int starty = 0, int endx = 1279, int endy = 719);
void SetControl(int, int) { return; };
void setContrast(int val);
void SetVideoMode(analog_mode_t mode);
void SetDBDR(int) { return; };
void SetAudioHandle(void *) { return; };
void SetAutoModes(int [VIDEO_STD_MAX]) { return; };
int OpenVBI(int) { return 0; };
int CloseVBI(void) { return 0; };
int StartVBI(unsigned short) { return 0; };
int StopVBI(void) { return 0; };
void SetDemux(cDemux *dmx);
bool GetScreenImage(unsigned char * &data, int &xres, int &yres, bool get_video = true, bool get_osd = false, bool scale_to_video = false);
SWFramebuffer *getDecBuf(void);
private:
void run();
SWFramebuffer buffers[VDEC_MAXBUFS];
int dec_w, dec_h;
int dec_r;
bool w_h_changed;
bool thread_running;
VIDEO_FORMAT v_format;
VIDEO_STD v_std;
OpenThreads::Mutex buf_m;
DISPLAY_AR display_aspect;
DISPLAY_AR_MODE display_crop;
int output_h;
int pig_x;
int pig_y;
int pig_w;
int pig_h;
bool pig_changed;
OpenThreads::Mutex still_m;
bool stillpicture;
};
#endif