TI中文支持网
TI专业的中文技术问题搜集分享网站

am5728 采集后HDMI输出问题

大家好!我用AM5728采集视频数据,想对每帧数据进行图像处理,然后再输出HDMI。参考了如下例程,能写出文件可播放,现在想不输出文件,而直接通过HDMI输出显示。说起来很简单,搞了好长时间,没有好的解决方法。请大家帮我看看有没有办法。

/*
* V4L2 video capture example
*
* This program can be used and distributed without restrictions.
*/

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h> /* getopt_long() */
#include <fcntl.h> /* low-level i/o */
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h> /* for videodev2.h */
#include <linux/videodev2.h>

#include <signal.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

/* Set Default Parameters */
#define STREAM_TYPE "NV12"   /* format string advertised on the appsrc caps */
#define FRAME_WIDTH 1920
#define FRAME_HEIGHT 1080
#define FRAME_RATE 30
/* Bytes per NV12/I420 frame: 1.5 bytes per pixel.  Written as parenthesized
 * integer arithmetic (3/2 applied last) so the macro expands safely inside
 * any expression and stays an integer instead of the original's double. */
#define FRAME_SIZE ((FRAME_WIDTH) * (FRAME_HEIGHT) * 3 / 2)

#define CLEAR(x) memset (&(x), 0, sizeof (x))

/********************************************************************************************************************************/

/* One mmap'ed V4L2 capture buffer (filled in by init_mmap()). */
struct buffer {
void * start;   /* mapped address returned by mmap() */
size_t length;  /* mapping length in bytes (driver's buf.length) */
};

struct buffer * buffers = NULL;   /* array of n_buffers mmap'ed capture buffers */

static char * dev_name = NULL;    /* V4L2 device path, e.g. "/dev/video1" */

static int fd = -1;               /* open file descriptor for dev_name */

static unsigned int n_buffers = 0; /* number of valid entries in buffers[] */

static GMainLoop *loop;           /* GLib main loop that drives the pipeline */

static GstElement *pipeline, *appsrc, *conv, *encoder, *app_sink; /* pipeline elements */

static GstBus *bus;               /* pipeline message bus (unref'ed right after add_watch) */

static FILE *s_fp;                /* output file receiving the encoded H.264 stream */

/********************************************************************************************************************************/

/*
 * Print the average capture frame rate roughly once per second.
 * Keeps its counters in static storage, so it is not thread-safe;
 * call it from the streaming thread only.
 */
static void calc_fps(void) {
    static int frame_cnt = 0;
    static struct timeval start, end;

    if (frame_cnt == 0)
        gettimeofday(&start, NULL);   /* open a new measurement window */
    frame_cnt++;
    gettimeofday(&end, NULL);

    /* Elapsed microseconds since the window opened.  (The original used a
     * typographic en-dash here instead of '-', which does not compile.) */
    int timeuse = 1000000 * (end.tv_sec - start.tv_sec) + end.tv_usec - start.tv_usec;
    if (timeuse > 1 * 1000 * 1000) {
        float fps = frame_cnt / ((float)timeuse / 1000 / 1000);
        printf(" — Fps — : %.2f \n", fps);
        frame_cnt = 0;                /* restart the window on the next call */
    }
}

/*
 * Copy `size` bytes in 64-byte blocks using ARM NEON vldm/vstm (ARMv7 only).
 *
 * NOTE(review): a non-multiple-of-64 `size` is rounded UP, so the copy reads
 * and writes past `size` bytes — both buffers must be padded to a 64-byte
 * boundary.  Confirm every caller allocates accordingly.
 */
static void neon_copy(volatile unsigned char *dst, volatile unsigned char *src, int size) {
    if (size & 63)
        size = (size & -64) + 64;  /* round up; original had an en-dash, not '-' */
    asm volatile (
        /* Numeric local label ("1:" / "1b") instead of a named one, so the
         * function can be inlined or instantiated twice without duplicate
         * assembler symbols. */
        "1: \n"
        " vldm %[src]!,{d0-d7} \n"
        " vstm %[dst]!,{d0-d7} \n"
        " subs %[size],%[size],#0x40 \n"
        " bgt 1b \n"
        : [dst]"+r"(dst), [src]"+r"(src), [size]"+r"(size)
        :
        : "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "cc", "memory");
}

/* Report the errno of the failing call identified by `s`, then abort. */
static void errno_exit(const char *s){
    int err = errno;  /* snapshot before any call can clobber it */

    fprintf (stderr, "%s error %d, %s\n",s, err, strerror (err));
    exit (EXIT_FAILURE);
}

/* ioctl() wrapper that retries transparently when a signal interrupts
 * the call (EINTR); returns the final ioctl() result. */
static int xioctl(int fd, int request, void * arg){
    for (;;) {
        int ret = ioctl (fd, request, arg);

        if (ret != -1 || errno != EINTR)
            return ret;
    }
}

/*
 * Hand every mmap'ed buffer to the driver, then start streaming.
 * Any ioctl failure terminates the process via errno_exit().
 */
static void start_capturing(void){
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    unsigned int i;

    for (i = 0; i < n_buffers; i++) {
        struct v4l2_buffer buf;

        CLEAR (buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (xioctl (fd, VIDIOC_QBUF, &buf) == -1)
            errno_exit ("VIDIOC_QBUF");
    }

    if (xioctl (fd, VIDIOC_STREAMON, &type) == -1)
        errno_exit ("VIDIOC_STREAMON");
}

/*
 * Unmap every capture buffer and free the bookkeeping array.
 * Resets the globals afterwards so an accidental second call is a no-op
 * instead of a use-after-free / double free.
 */
static void uninit_device (void){
    unsigned int i;

    for (i = 0; i < n_buffers; ++i)
        if (-1 == munmap (buffers[i].start, buffers[i].length))
            errno_exit ("munmap");
    free (buffers);
    buffers = NULL;    /* defend against reuse of the freed pointer */
    n_buffers = 0;
}

/*
 * Request 4 memory-mapped capture buffers from the driver, query each one's
 * offset/length, and mmap() it into this process.  Fills the global
 * buffers[] array and n_buffers.  Exits the process on any failure.
 */
static void init_mmap(void){
struct v4l2_requestbuffers req;
CLEAR (req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl (fd, VIDIOC_REQBUFS, &req)) {
if (EINVAL == errno){
/* EINVAL from REQBUFS means the driver has no MMAP streaming support. */
fprintf (stderr, "%s does not support "
"memory mapping\n", dev_name);
exit (EXIT_FAILURE);
} else {
errno_exit ("VIDIOC_REQBUFS");
}
}
/* The driver may grant fewer buffers than requested; two is the minimum
 * needed to capture without dropping frames. */
if (req.count < 2) {
fprintf (stderr, "Insufficient buffer memory on %s\n",dev_name);
exit (EXIT_FAILURE);
}
buffers = calloc (req.count, sizeof (*buffers));
if (!buffers) {
fprintf (stderr, "Out of memory\n");
exit (EXIT_FAILURE);
}
/* Map each driver buffer at the offset reported by QUERYBUF. */
for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == xioctl (fd, VIDIOC_QUERYBUF, &buf))
errno_exit ("VIDIOC_QUERYBUF");
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap (NULL /* start anywhere */,
buf.length,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */,
fd, buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start)
errno_exit ("mmap");
}
}

/*
 * Verify the device is a V4L2 capture device, reset cropping to the
 * default rectangle, negotiate a 1920x1080 YUV420 progressive format,
 * then set up the mmap buffers via init_mmap().
 *
 * NOTE(review): the pixel format requested here (V4L2_PIX_FMT_YUV420, i.e.
 * planar I420) does not match the "NV12" caps advertised on the appsrc in
 * GST_ENC_INIT() — the U/V plane layout differs.  Confirm which format the
 * capture driver actually delivers and make the two agree.
 */
static void init_device(void){
struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
struct v4l2_format fmt;
unsigned int min;
if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) {
if (EINVAL == errno) {
fprintf (stderr, "%s is no V4L2 device\n", dev_name);
exit (EXIT_FAILURE);
} else {
errno_exit ("VIDIOC_QUERYCAP");
}
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
fprintf (stderr, "%s is no video capture device\n", dev_name);
exit (EXIT_FAILURE);
}
/* Select video input, video standard and tune here. */
CLEAR (cropcap);
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect; /* reset to default */
if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) {
switch (errno) {
case EINVAL:
/* Cropping not supported. */
break;
default:
/* Errors ignored. */
break;
}
}
} else { /* Errors ignored. */
}
CLEAR (fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = FRAME_WIDTH;
fmt.fmt.pix.height = FRAME_HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt))
errno_exit ("VIDIOC_S_FMT");
/* Note VIDIOC_S_FMT may change width and height. */
/* Buggy driver paranoia. */
/* NOTE(review): width * 1.5 treats bytesperline as bytes-per-pixel-row of a
 * packed 1.5-bpp frame; for planar YUV420 bytesperline normally refers to
 * the luma plane stride (== width).  Verify against the driver's values. */
min = fmt.fmt.pix.width * 1.5;
if (fmt.fmt.pix.bytesperline < min)
fmt.fmt.pix.bytesperline = min;
min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
if (fmt.fmt.pix.sizeimage < min)
fmt.fmt.pix.sizeimage = min;
init_mmap ();
}

/* Close the capture device and mark the global fd invalid. */
static void close_device(void){
    if (close (fd) == -1)
        errno_exit ("close");
    fd = -1;  /* prevent accidental reuse of the stale descriptor */
}

/*
 * Validate that dev_name names a character device and open it read/write
 * in non-blocking mode (stores the descriptor in the global fd).
 * Exits the process on any failure.
 */
static void open_device(void){
    struct stat st;

    if (stat (dev_name, &st) == -1) {
        fprintf (stderr, "Cannot identify '%s': %d, %s\n", dev_name, errno, strerror (errno));
        exit (EXIT_FAILURE);
    }
    if (!S_ISCHR (st.st_mode)) {
        fprintf (stderr, "%s is no device\n", dev_name);
        exit (EXIT_FAILURE);
    }

    /* O_NONBLOCK lets the DQBUF path return EAGAIN instead of blocking. */
    fd = open (dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
    if (fd == -1) {
        fprintf (stderr, "Cannot open '%s': %d, %s\n", dev_name, errno, strerror (errno));
        exit (EXIT_FAILURE);
    }
}

/********************************************************************************************************************************/

/*
 * GStreamer bus watch: quit the main loop on end-of-stream or on a
 * pipeline error.  `data` is the GMainLoop given to gst_bus_add_watch().
 */
static gboolean bus_call(GstBus * bus, GstMessage * msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE(msg))
    {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR:
    {
        gchar *debug = NULL;
        GError *error = NULL;

        gst_message_parse_error(msg, &error, &debug);
        g_printerr("ERROR:%s\n", error->message);
        /* The original freed the debug string before it could be shown;
         * print it first — it usually pinpoints the failing element. */
        if (debug)
            g_printerr("Debug info: %s\n", debug);
        g_free(debug);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;  /* keep the watch installed */
}

/*
 * appsrc "need-data" callback: dequeue one captured frame from V4L2, copy
 * it into a new GstBuffer, push it into the pipeline, then return the V4L2
 * buffer to the driver.
 *
 * Fix vs. the original: VIDIOC_QBUF is now issued AFTER the frame data has
 * been copied out.  Requeueing first allowed the driver to overwrite the
 * buffer while neon_copy() was still reading it, producing torn frames.
 */
static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
    struct v4l2_buffer buf;
    GstBuffer *buffer;
    GstFlowReturn ret;
    GstMapInfo map;
    size_t copy_len;

    CLEAR (buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl (fd, VIDIOC_DQBUF, &buf)) {
        switch (errno) {
        case EAGAIN:
            return;  /* no frame ready yet (fd is non-blocking) */
        case EIO:
            /* Could ignore EIO, see spec. */
            /* fall through */
        default:
            errno_exit ("VIDIOC_DQBUF");
        }
    }
    assert (buf.index < n_buffers);

    buffer = gst_buffer_new_allocate (NULL, FRAME_SIZE, NULL);
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    /* Never copy more than the destination holds: the driver's sizeimage
     * (buf.length) may be padded beyond one FRAME_SIZE frame. */
    copy_len = buf.length < (size_t) FRAME_SIZE ? buf.length : (size_t) FRAME_SIZE;
    neon_copy((volatile unsigned char *) map.data,
              (volatile unsigned char *) buffers[buf.index].start,
              (int) copy_len);
    gst_buffer_unmap (buffer, &map);

    /* Copy complete — only now is it safe to give the buffer back. */
    if (-1 == xioctl (fd, VIDIOC_QBUF, &buf))
        errno_exit ("VIDIOC_QBUF");

    g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref (buffer);
    calc_fps();
    if (ret != GST_FLOW_OK) {
        /* Downstream refused the buffer — stop pushing. */
        g_main_loop_quit (loop);
    }
}

/*
 * appsink "new-sample" callback: pull one encoded H.264 sample and append
 * its bytes to the output file s_fp.
 *
 * Fixes vs. the original: the NULL check now runs BEFORE the sample is
 * dereferenced, and a failed gst_memory_map() returns instead of falling
 * through to fwrite() with an unmapped GstMapInfo (and leaking the sample).
 */
GstFlowReturn new_sample(GstAppSink *sink, gpointer data) {
    GstSample *sample = gst_app_sink_pull_sample(sink);
    GstMemory *memory;
    GstMapInfo map;

    if (sample == NULL) {
        return GST_FLOW_ERROR;
    }

    memory = gst_buffer_get_all_memory(gst_sample_get_buffer(sample));
    if (!gst_memory_map(memory, &map, GST_MAP_READ)) {
        gst_memory_unref(memory);
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    /* save */
    fwrite(map.data, map.size, 1, s_fp);
    fflush(s_fp);

    gst_memory_unmap(memory, &map);
    gst_memory_unref(memory);
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

void GST_ENC_INIT()
{
/* Init Gstreamer */
gst_init (0, NULL);
loop = g_main_loop_new (NULL, FALSE);
/* Create a new pipeline */
pipeline = gst_pipeline_new ("pipeline");
/* Initialize elements */
appsrc = gst_element_factory_make ("appsrc", "source");
conv = gst_element_factory_make ("videoconvert", "conv");
encoder = gst_element_factory_make("ducatih264enc", "video-encoder"); //ducatih264enc
app_sink = gst_element_factory_make ("appsink", "app_sink");
if (!pipeline || !appsrc || !conv || !encoder || !app_sink){
g_printerr("One element could not be created.\n");
return;
}
/* setup */
g_object_set (G_OBJECT (appsrc), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, STREAM_TYPE,
"width", G_TYPE_INT, FRAME_WIDTH,
"height", G_TYPE_INT, FRAME_HEIGHT,
"framerate", GST_TYPE_FRACTION, FRAME_RATE, 1,
NULL), NULL);
/* set callbacks*/
g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);
GstAppSinkCallbacks *callbacks = (GstAppSinkCallbacks*)g_new0(GstAppSinkCallbacks, 1);
callbacks->eos = NULL;
callbacks->new_preroll = NULL;
callbacks->new_sample = new_sample;
gst_app_sink_set_callbacks (GST_APP_SINK(app_sink), callbacks, NULL, NULL);
/* Linkup elements */
gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, encoder, app_sink, NULL);
gst_element_link_many (appsrc, conv, encoder, app_sink, NULL);
/* add message monitor */
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
printf("Ready To Save Image … \n");
}

/* Set the pipeline to PLAYING and block in the GLib main loop until it
 * quits (EOS, a bus error, or a failed push in cb_need_data()). */
void GST_ENC_PLAY()
{
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
}

/* Stop the pipeline and release the pipeline and main-loop references
 * created in GST_ENC_INIT(). */
void GST_RELEASE()
{
/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
g_main_loop_unref (loop);
}

/********************************************************************************************************************************/

/*
 * Print command-line usage to `fp`.
 * Fix: the long options were shown with a typographic en-dash ("–device");
 * restored to the real double-hyphen spelling the parser accepts.
 */
static void usage(FILE *fp, int argc, char **argv){
    fprintf (fp,
            "Usage: %s [options]\n\n"
            "Options:\n"
            "-d | --device name Video device name [/dev/video]\n"
            "-h | --help Print this message\n"
            "",
            argv[0]);
}

/* Command-line option tables for getopt_long().  Only -d/--device and
 * -h/--help are handled; the stray "mru" letters the original accepted
 * are dropped — they always fell through to the usage/exit default. */
static const char short_options [] = "d:h";

static const struct option
long_options [] = {
{ "device", required_argument, NULL, 'd' },
{ "help", no_argument, NULL, 'h' },
{ 0, 0, 0, 0 }
};

/*
 * Entry point: parse options, bring up the GStreamer pipeline and the V4L2
 * capture device, then stream encoded frames into "test.264" until the
 * pipeline stops.
 */
int main(int argc, char **argv){
    dev_name = "/dev/video1";  /* default capture device */

    for (;;) {
        int index;
        int c = getopt_long (argc, argv, short_options, long_options, &index);

        if (-1 == c)
            break;
        switch (c) {
        case 0: /* getopt_long() flag */
            break;
        case 'd':
            dev_name = optarg;
            break;
        case 'h':
            usage (stdout, argc, argv);
            exit (EXIT_SUCCESS);
        default:
            usage (stderr, argc, argv);
            exit (EXIT_FAILURE);
        }
    }

    GST_ENC_INIT();
    open_device ();
    init_device ();
    start_capturing ();

    s_fp = fopen("test.264", "w");
    if (s_fp != NULL) {
        GST_ENC_PLAY();
    } else {
        /* Report the failure instead of silently skipping playback. */
        fprintf (stderr, "Cannot open test.264: %s\n", strerror (errno));
    }

    GST_RELEASE();
    /* Original called fclose(s_fp) unconditionally — UB when fopen failed. */
    if (s_fp != NULL)
        fclose(s_fp);
    uninit_device ();
    close_device ();
    return EXIT_SUCCESS;  /* the unreachable exit()+return pair is gone */
}

yongqing wang:

一般 HDMI 输出对应 framework 层,系统会共享一段显存(framebuffer);把要显示的帧数据更新到这段显存中,就能直接在 HDMI 上显示出来。

赞(0)
未经允许不得转载:TI中文支持网 » am5728 采集后HDMI输出问题
分享到: 更多 (0)