#if !defined(HAVE_VIDEO_CONSOLE) || !defined(HAVE_FFMPEG)

static void my_scale(struct fbuf_t *in, AVPicture *p_in,
    struct fbuf_t *out, AVPicture *p_out);

    struct timeval      last_frame;     /* (struct video_device) when the last frame was read */

struct video_out_desc {

    struct fbuf_t   loc_src_geometry;

    AVFrame     *enc_in_frame;

    int         device_secondary;

    int             picture_in_picture;

    /* the fields below belong to the enclosing struct video_desc,
     * which embeds the output descriptor as "out" */
    char keypad_file[256];
    char keypad_font[256];

    char sdl_videodriver[256];

    struct gui_info *gui;

    struct video_out_desc out;
 
static AVPicture *fill_pict(struct fbuf_t *b, AVPicture *p);
 
    if (b->data && b->size)

    memset(b, '\0', sizeof(*b));

    b->pix_fmt = x.pix_fmt;

    return env ? env->stayopen : 0;
 
used_mem(const char *msg)

    pid_t pid = getpid();
    sprintf(in, "ps -o vsz= -o rss= %d", pid);
 
static int grabber_open(struct video_out_desc *v)

    for (i = 0; i < v->device_num; i++) {

        if (v->devices[i].grabber)

            g_data = g->open(v->devices[i].name, &v->loc_src_geometry, v->fps);

            v->devices[i].grabber = g;
            v->devices[i].grabber_data = g_data;
            v->devices[i].status_index |= IS_ON;

    for (i = 0; i < v->device_num; i++) {
        if (!v->devices[i].grabber)

        v->device_primary = i;
        v->device_secondary = i;
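/*
 * A minimal sketch of the driver interface the loop above relies on. The
 * callback set (open/read/move/close) is inferred from how g->open(),
 * grabber->read(), grabber->move() and grabber->close() are invoked in this
 * file; the struct below is illustrative only and is not the project's
 * actual grab_desc definition.
 */
struct grabber_sketch {
    const char *name;                                   /* driver name, e.g. "v4l" or "x11" (assumed) */
    void *(*open)(const char *dev, struct fbuf_t *geom, int fps);   /* returns per-device state */
    struct fbuf_t *(*read)(void *data);                 /* NULL when no new frame is available */
    void (*move)(void *data, int dx, int dy);           /* optional: move the capture window */
    void *(*close)(void *data);                         /* release state, returns NULL */
};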
 
static struct fbuf_t *grabber_read(struct video_device *dev, int fps)

    if (dev->grabber == NULL)

    dev->last_frame = now;
    return dev->grabber->read(dev->grabber_data);
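/*
 * The elided checks above appear to pace capture to the configured frame
 * rate: last_frame is compared against the current time (ast_tvnow() /
 * ast_tvdiff_ms()) and the read is skipped until roughly 1000/fps
 * milliseconds have elapsed, which is why the timestamp is refreshed just
 * before calling the driver's read() callback.
 */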
 
static void grabber_move(struct video_device *dev, int dx, int dy)

    if (dev->grabber && dev->grabber->move) {
        dev->grabber->move(dev->grabber_data, dx, dy);
 
static int video_out_uninit(struct video_desc *env)

    struct video_out_desc *v = &env->out;

        AVCodecContext *enc_ctx = (AVCodecContext *)v->enc_ctx;
        avcodec_close(enc_ctx);

    if (v->enc_in_frame) {
        av_free(v->enc_in_frame);
        v->enc_in_frame = NULL;

    for (i = 0; i < v->device_num; i++) {
        if (v->devices[i].grabber) {
            v->devices[i].grabber_data =
                v->devices[i].grabber->close(v->devices[i].grabber_data);
            v->devices[i].grabber = NULL;

            v->devices[i].dev_buf = NULL;

        v->devices[i].status_index = 0;

    v->picture_in_picture = 0;
    env->frame_freeze = 0;
 
static int video_out_init(struct video_desc *env)

    struct video_out_desc *v = &env->out;

    v->enc_in_frame     = NULL;
    v->enc_out.data     = NULL;

    v->codec = avcodec_find_encoder(codec);

    enc_in = &env->enc_in;
    enc_in->pix_fmt = PIX_FMT_YUV420P;
    enc_in->size = (enc_in->w * enc_in->h * 3)/2;

        return video_out_uninit(env);

    v->enc_in_frame = avcodec_alloc_frame();
    if (!v->enc_in_frame) {

        return video_out_uninit(env);

    size = enc_in->w * enc_in->h;
    v->enc_in_frame->data[0] = enc_in->data;
    v->enc_in_frame->data[1] = v->enc_in_frame->data[0] + size;
    v->enc_in_frame->data[2] = v->enc_in_frame->data[1] + size/4;
    v->enc_in_frame->linesize[0] = enc_in->w;
    v->enc_in_frame->linesize[1] = enc_in->w/2;
    v->enc_in_frame->linesize[2] = enc_in->w/2;
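/*
 * YUV420P layout recap for the pointers above: the Y plane is w*h bytes,
 * followed by the U and V planes at quarter size (w/2 x h/2, i.e. size/4
 * bytes each), which is why enc_in->size is (w*h*3)/2 and the chroma
 * linesizes are w/2. For CIF (352x288): Y = 101376 bytes, U and V = 25344
 * bytes each, 152064 bytes total.
 */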
 
        AVCodecContext *enc_ctx = avcodec_alloc_context();
        v->enc_ctx = enc_ctx;
        enc_ctx->pix_fmt = enc_in->pix_fmt;
        enc_ctx->width = enc_in->w;
        enc_ctx->height = enc_in->h;

        enc_ctx->rtp_mode = 1;
        enc_ctx->rtp_payload_size = v->mtu / 2;
        enc_ctx->bit_rate = v->bitrate;
        enc_ctx->bit_rate_tolerance = enc_ctx->bit_rate/2;
        enc_ctx->qmin = v->qmin;
        enc_ctx->time_base = (AVRational){1, v->fps};
        enc_ctx->gop_size = v->fps*5;

        v->enc->enc_init(v->enc_ctx);

        if (avcodec_open(enc_ctx, v->codec) < 0) {

            return video_out_uninit(env);

    v->enc_out.size = enc_in->size;
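/*
 * A few notes on the values chosen above: time_base = {1, fps} makes one
 * tick equal one frame; gop_size = fps*5 asks for a keyframe roughly every
 * five seconds; bit_rate_tolerance at half the bitrate loosens rate control
 * proportionally; and rtp_payload_size = mtu/2 appears intended to keep
 * each encoded slice comfortably within one RTP packet once headers are
 * added (the same mtu is later passed to enc_encap()).
 */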
 
    if (env->stayopen == 0) {

        for (i = 0; env->shutdown && i < 10; i++) {
 
static AVPicture *fill_pict(struct fbuf_t *b, AVPicture *p)

    int l4 = b->w * b->h/4;

    memset(p, '\0', sizeof(*p));
    switch (b->pix_fmt) {

    case PIX_FMT_YUYV422:

    p->data[0] = b->data;
    p->linesize[0] = len;

    p->data[1] = luv ? b->data + 4*l4 : b->data + len;
    p->data[2] = luv ? b->data + 5*l4 : b->data + len;
    p->linesize[1] = luv;
    p->linesize[2] = luv;

    p->data[0] += len*b->win_y + b->win_x*sample_size;

        p->data[1] += luv*(b->win_y/2) + (b->win_x/2) * sample_size;
        p->data[2] += luv*(b->win_y/2) + (b->win_x/2) * sample_size;
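/*
 * The pointer adjustments above relocate the picture origin to the
 * sub-window described by win_x/win_y, so a caller such as my_scale() can
 * write into just a region of the destination buffer; this is what the
 * picture-in-picture code below uses to paint the secondary source into a
 * corner of enc_in. Chroma offsets are halved because in 4:2:0 each U/V
 * sample covers a 2x2 block of luma pixels.
 */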
 
static void my_scale(struct fbuf_t *in, AVPicture *p_in,
    struct fbuf_t *out, AVPicture *p_out)

    AVPicture my_p_in, my_p_out;
    int eff_w = out->w, eff_h = out->h;

        p_in = fill_pict(in, &my_p_in);

        p_out = fill_pict(out, &my_p_out);

    img_convert(p_out, out->pix_fmt,
        p_in, in->pix_fmt, in->w, in->h);

        struct SwsContext *convert_ctx;

        convert_ctx = sws_getContext(in->w, in->h, in->pix_fmt,
            eff_w, eff_h, out->pix_fmt,

        if (convert_ctx == NULL) {
            ast_log(LOG_ERROR, "FFMPEG::convert_cmodel : swscale context initialization failed\n");

                in->pix_fmt, in->w, in->h, out->pix_fmt, eff_w, eff_h);

        sws_scale(convert_ctx,
            p_in->data, p_in->linesize,

            p_out->data, p_out->linesize);

        sws_freeContext(convert_ctx);
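/*
 * The scaler context above is created and destroyed on every frame.
 * libswscale also provides sws_getCachedContext(), which reuses the old
 * context whenever the geometries and pixel formats are unchanged; a sketch
 * of that variant (the scaling flag and NULL filters are assumptions, since
 * the trailing sws_getContext() arguments are elided here):
 *
 *     static struct SwsContext *cached;
 *     cached = sws_getCachedContext(cached, in->w, in->h, in->pix_fmt,
 *             eff_w, eff_h, out->pix_fmt, SWS_BICUBIC, NULL, NULL, NULL);
 */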
 
#if defined(DROP_PACKETS) && DROP_PACKETS > 0
    /* simulate loss by dropping roughly DROP_PACKETS percent of outgoing frames */
    if ((random() % 10000) <= 100*DROP_PACKETS) {
 
    struct video_out_desc *v = &env->out;

    struct fbuf_t *loc_src_primary = NULL, *p_read;

    if (!env->out.device_num)

    for (i = 0; i < env->out.device_num; i++) {
        p_read = grabber_read(&env->out.devices[i], env->out.fps);

            env->out.devices[i].dev_buf = p_read;

    loc_src_primary = env->out.devices[env->out.device_primary].dev_buf;

    if (loc_src_primary) {

        my_scale(loc_src_primary, NULL, &env->enc_in, NULL);
 
    if (env->out.picture_in_picture) {
        struct fbuf_t *loc_src_secondary;

        loc_src_secondary = env->out.devices[env->out.device_secondary].dev_buf;
        if (loc_src_secondary) {
            env->enc_in.win_x = env->out.pip_x;
            env->enc_in.win_y = env->out.pip_y;
            env->enc_in.win_w = env->enc_in.w/3;
            env->enc_in.win_h = env->enc_in.h/3;

            my_scale(loc_src_secondary, NULL, &env->enc_in, NULL);

            env->enc_in.win_x = 0;
            env->enc_in.win_y = 0;
            env->enc_in.win_w = 0;
            env->enc_in.win_h = 0;

            env->out.picture_in_picture = 0;
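/*
 * Picture-in-picture is driven entirely by the win_* fields of enc_in: the
 * secondary source is scaled into a window one third of the frame in each
 * dimension, anchored at pip_x/pip_y (init_env() defaults these to
 * ei->w - ei->w/3 and ei->h - ei->h/3, i.e. the bottom-right corner), and
 * the window is reset afterwards so later scaling covers the full frame.
 */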
 
    for (i = 0; i < env->out.device_num; i++)

    if (!env->owner || !loc_src_primary || !v->sendvideo)

    if (v->enc_out.data == NULL) {
        static volatile int a = 0;

    return v->enc->enc_encap(&v->enc_out, v->mtu, tail);
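/*
 * enc_encap() presumably takes the encoded bitstream in enc_out and splits
 * it into a chain of ast_frame packets no larger than the configured MTU
 * (appending them through tail), matching the rtp_payload_size hint given
 * to the encoder in video_out_init().
 */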
 
static void *video_thread(void *arg)

    struct video_desc *env = arg;

    char save_display[128] = "";

        const char *s = getenv("DISPLAY");
        setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
        if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {

        setenv("DISPLAY", save_display, 1);
 
    if (grabber_open(&env->out)) {

    if (env->out.device_num) {
        env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY;

        for (i = 0; i < env->out.device_num; i++) {

                src_msgs[env->out.devices[i].status_index]);
 
        struct timespec t = { 0, 50000000 };    /* 50 ms sleep: the loop runs about 20 times per second */

        char *caption = NULL, buf[160];

        if (count++ % 10 == 0) {
            if (env->out.sendvideo && env->out.devices) {
                snprintf(buf, sizeof(buf), "%s %s %dx%d @ %dfps %dkbps",
                env->out.devices[env->out.device_primary].name, env->codec_name,
                env->enc_in.w, env->enc_in.h,
                env->out.fps, env->out.bitrate / 1000);

                sprintf(buf, "hold");
 
        f = get_video_frames(env, &p);

        if (ast_channel_alertable(chan)) {

                if (ast_channel_alert(chan)) {
                    ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n",

    video_out_uninit(env);
 
static void init_env(struct video_desc *env)

    struct fbuf_t *c = &(env->out.loc_src_geometry);

    c->pix_fmt = PIX_FMT_YUV420P;
    ei->pix_fmt = PIX_FMT_YUV420P;
    if (ei->w == 0 || ei->h == 0) {

    copy_geometry(ei, c);
    copy_geometry(ei, rd);
    copy_geometry(rd, ld);

    for (i = 0; i < env->out.device_num; i++) {
        env->src_dpy[i].pix_fmt = PIX_FMT_YUV420P;

    env->out.pip_x = ei->w - ei->w/3;
    env->out.pip_y = ei->h - ei->h/3;

    env->out.enc = map_config_video_format(env->codec_name);

        env->codec_name, env->enc_in.w, env->enc_in.h);

    avcodec_register_all();
    av_log_set_level(AV_LOG_ERROR);

    if (env->out.fps == 0) {

    if (env->out.bitrate == 0) {
        env->out.bitrate = 65000;
 
static int video_geom(struct fbuf_t *b, const char *s)

        const char *s; int w; int h;

        {"16cif",   1408, 1152 },
        {"xga",     1024, 768 },
        {"4cif",    704, 576 },

        {"qvga",    320, 240 },
        {"qcif",    176, 144 },
        {"sqcif",   128, 96 },

    if (*s == '<' || *s == '>')
        sscanf(s+1, "%dx%d", &w, &h);
    for (fp = formats; fp->s; fp++) {

        } else if (*s == '<') {

        } else if (!strcasecmp(s, fp->s)) {

    if (*s == '<' && fp->s == NULL)

    } else if (sscanf(s, "%dx%d", &b->w, &b->h) != 2) {
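/*
 * Geometry strings accepted here, as far as this excerpt shows: a symbolic
 * name from the table above (case-insensitive, e.g. "cif" or "qvga"), an
 * explicit "WxH" pair such as "352x288", or a "<WxH" / ">WxH" form that
 * scans the table for a named size below / above the requested one (the
 * exact comparison rule lives in the elided branches, so treat that as an
 * approximation). Typical configuration values would therefore be
 * video_size=cif or video_size=<640x480.
 */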
 
static int device_table_fill(struct video_device *devices, int *device_num_p, const char *s)

    struct video_device *p;

    if (*device_num_p >= 9)

    for (i = 0; i < *device_num_p; i++) {

    p->grabber_data = NULL;

    p->status_index = 0;
 
    if (!strcasecmp(var, "videodevice")) {
        ast_cli(fd, "videodevice is [%s]\n", env->out.devices[env->out.device_primary].name);
    } else if (!strcasecmp(var, "videocodec")) {
        ast_cli(fd, "videocodec is [%s]\n", env->codec_name);
    } else if (!strcasecmp(var, "sendvideo")) {
        ast_cli(fd, "sendvideo is [%s]\n", env->out.sendvideo ? "on" : "off");
    } else if (!strcasecmp(var, "video_size")) {
        int in_w = 0, in_h = 0;

            in_w = env->in->dec_out.w;
            in_h = env->in->dec_out.h;

        ast_cli(fd, "sizes: video %dx%d camera %dx%d local %dx%d remote %dx%d in %dx%d\n",
            env->enc_in.w, env->enc_in.h,
            env->out.loc_src_geometry.w, env->out.loc_src_geometry.h,
            env->loc_dpy.w, env->loc_dpy.h,
            env->rem_dpy.w, env->rem_dpy.h,

    } else if (!strcasecmp(var, "bitrate")) {
        ast_cli(fd, "bitrate is [%d]\n", env->out.bitrate);
    } else if (!strcasecmp(var, "qmin")) {
        ast_cli(fd, "qmin is [%d]\n", env->out.qmin);
    } else if (!strcasecmp(var, "fps")) {

    } else if (!strcasecmp(var, "startgui")) {

    } else if (!strcasecmp(var, "stopgui") && env->stayopen != 0) {

        if (env->gui && env->owner)
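/*
 * This if/else chain is the back end of the console CLI handler
 * (console_video_cli(env, var, fd)): each recognised variable name either
 * reports its current value via ast_cli() or triggers an action. The names
 * handled in this excerpt are videodevice, videocodec, sendvideo,
 * video_size, bitrate, qmin, fps, startgui and stopgui.
 */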
 
    const char *var, const char *val)

    struct video_desc *env;

        env->out.device_primary = 0;
        env->out.device_secondary = 0;

        env->out.bitrate = 65000;
        env->out.sendvideo = 1;

        env->out.device_num = 0;

    CV_F("videodevice", device_table_fill(env->out.devices, &env->out.device_num, val));
 
    CV_F("video_size", video_geom(&env->enc_in, val));
    CV_F("camera_size", video_geom(&env->out.loc_src_geometry, val));
    CV_F("local_size", video_geom(&env->loc_dpy, val));
    CV_F("remote_size", video_geom(&env->rem_dpy, val));

    CV_STR("keypad_font", env->keypad_font);
    CV_STR("sdl_videodriver", env->sdl_videodriver);
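/*
 * For reference, a configuration block feeding the CV_F/CV_STR entries above
 * might look like the sketch below; the section name, file and the sample
 * values are assumptions, only the option names are taken from the code:
 *
 *     [general]
 *     videodevice = /dev/video0
 *     video_size = cif
 *     camera_size = qcif
 *     local_size = qcif
 *     remote_size = cif
 *     keypad_font = /path/to/font.ttf
 *     sdl_videodriver = x11
 */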
 