Begging the experts for an explanation: I capture data with V4L and write it to the framebuffer, but the display is garbled (the dev board is a micro2440)
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <linux/videodev.h>
#include <sys/ioctl.h>
#include <fcntl.h> // open() flags such as O_RDWR
#include <linux/fb.h>
#include <sys/mman.h>
#include "camera.h"
/*********************************************************************************************************
** Function name: get_grab_frame
** Descriptions: Grab one image frame. Issues the VIDIOCMCAPTURE ioctl to start capturing a frame.
** Input: *vd, pointer to the device context
** frame, frame index
** Output : none
** Created by:
** Created Date:
**-------------------------------------------------------------------
** Modified by:
** Modified Date:
**------------------------------------------------------------------
********************************************************************************************************/
int get_grab_frame(fb_v41 *vd, int frame)
{
// The frame is still being captured
if (vd->frame_using[frame]) {
fprintf(stderr, "get_grab_frame: frame %d is already used.\n", frame);
return ERR_FRAME_USING;
}
vd->mmap.frame = frame;
if (ioctl(vd->fd, VIDIOCMCAPTURE, &(vd->mmap)) < 0) { // start capturing this frame
perror("v4l_grab_frame");
return ERR_GET_FRAME;
}
// mark the frame as busy
vd->frame_using[frame] = 1;
vd->frame_current = frame;
return 0;
}
/*********************************************************************************************************
** Function name: get_next_frame
** Descriptions: Grab the next image frame.
** Input: *vd, pointer to the device context
** Output : returns 0 on success.
** Created by:
** Created Date:
**-------------------------------------------------------------------
** Modified by:
** Modified Date:
**------------------------------------------------------------------
********************************************************************************************************/
int get_next_frame(fb_v41 *vd)
{
int ret;
vd->frame_current ^= 1; // toggle between the two capture frames (0 and 1)
ret = get_grab_frame( vd, vd->frame_current ); // start grabbing the next frame
if( ret < 0 )
return ret;
if (ioctl(vd->fd, VIDIOCSYNC, &(vd->frame_current)) < 0) // wait until the frame is fully captured
{
perror("v4l_grab_sync");
return ERR_SYNC;
}
vd->frame_using[vd->frame_current] = 0; // capture finished, mark the frame as free
return 0;
}
/*********************************************************************************************************
** Function name: get_next_frame_address
** Descriptions: Get the buffer address of the current frame.
** Input: *vd, pointer to the device context
** Output : returns a pointer to the frame's image data.
** Created by:
** Created Date:
**-------------------------------------------------------------------
** Modified by:
** Modified Date:
**------------------------------------------------------------------
********************************************************************************************************/
unsigned char *get_next_frame_address(fb_v41 *vd)
{
return (vd->map + vd->mbuf.offsets[vd->frame_current]); // locate the start of the current frame inside the mmap'ed capture buffer
}
/*********************************************************************************************************
** Function name: rgb_to_framebuffer
** Descriptions: Write image data to the framebuffer. open_framebuffer() must have succeeded before
** this function is called.
** Input: *vd, pointer to the device context
** width, image width (vd->mmap.width)
** height, image height
** xoffset, X offset of the image inside the framebuffer (vd->vinfo.xoffset)
** yoffset, Y offset of the image inside the framebuffer
** *img_ptr, pointer to the buffer to be written into the framebuffer
** Output : none
** Created by:
** Created Date:
**-------------------------------------------------------------------
** Modified by:
** Modified Date:
**
** vd->finfo.line_length
** -------------------------------
** | yoffset |
** | xoffset * |
** | |
** | |
** -------------------------------
**
**------------------------------------------------------------------
********************************************************************************************************/
void rgb_to_framebuffer( fb_v41 *vd,
int width,int height, // image size
int xoffset,int yoffset, // position of the image inside the framebuffer
unsigned short *img_ptr ) // pointer to the image data (16 bits per pixel)
{
int x,y;
int location;
unsigned short *loca_ptr;
// Figure out where in memory to put the pixel
for ( y = 0; y < height; y++ ) // scan rows
{
location = xoffset * 2 +
(y + yoffset) * vd->finfo.line_length; // xoffset * 2: each pixel occupies 2 bytes (16 bpp)
loca_ptr = (unsigned short *) (vd->fbp + location); // start address of this row
for ( x = 0; x < width; x++ ) // scan columns within the row
{
*(loca_ptr + x) = *img_ptr++;
}
}
}
int open_framebuffer(char *ptr,fb_v41 *vd) // open the LCD framebuffer device
{
int fbfd,screensize;
// Open the file for reading and writing
fbfd = open( ptr, O_RDWR);
if (fbfd < 0)
{
printf("Error: cannot open framebuffer device.%x\n",fbfd);
return ERR_FRAME_BUFFER;
}
printf("The framebuffer device was opened successfully.\n");
vd->fbfd = fbfd; // save the framebuffer file descriptor
// Get fixed screen information into vd->finfo
if (ioctl(fbfd, FBIOGET_FSCREENINFO, &vd->finfo))
{
printf("Error reading fixed information.\n");
return ERR_FRAME_BUFFER;
}
// Get variable screen information into vd->vinfo
if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vd->vinfo))
{
printf("Error reading variable information.\n");
return ERR_FRAME_BUFFER;
}
printf("%dx%d, %dbpp, xoffset=%d ,yoffset=%d \n", vd->vinfo.xres,
vd->vinfo.yres, vd->vinfo.bits_per_pixel,vd->vinfo.xoffset,vd->vinfo.yoffset );
// Figure out the size of the screen in bytes
screensize = vd->vinfo.xres * vd->vinfo.yres * vd->vinfo.bits_per_pixel / 8;
// screensize = width * height * bits_per_pixel / 8 (bytes)
// Map the device to memory
vd->fbp = (char *)mmap(0,screensize,PROT_READ|PROT_WRITE,MAP_SHARED,fbfd,0); // map the framebuffer device into memory
if (vd->fbp == MAP_FAILED)
{
printf("Error: failed to map framebuffer device to memory.\n");
return ERR_FRAME_BUFFER;
}
printf("The framebuffer device was mapped to memory successfully.\n");
return 0;
}
int open_video( char *fileptr,fb_v41 *vd ,int dep,int pal,int width,int height)
{
// Open the video device
if ((vd->fd = open(fileptr, O_RDWR)) < 0) // open the camera
{
perror("v4l_open:");
return ERR_VIDEO_OPEN;
}
printf("=============Open Video Success=======================\n");
// Query the device capabilities
if (ioctl(vd->fd, VIDIOCGCAP, &(vd->capability)) < 0)
{
perror("v4l_get_capability:");
return ERR_VIDEO_GCAP;
}
printf("=============Get Device Success=======================\n");
// Query the current picture parameters
if (ioctl(vd->fd, VIDIOCGPICT, &(vd->picture)) < 0)
{
perror("v4l_get_picture");
return ERR_VIDEO_GPIC;
}
printf("=============Get Picture Success======================\n");
// Configure the picture parameters based on the queried information
vd->picture.palette = pal; // palette (pixel format)
vd->picture.depth = dep; // pixel depth (bits per pixel)
vd->mmap.format = pal; // capture format passed to VIDIOCMCAPTURE
if (ioctl(vd->fd, VIDIOCSPICT, &(vd->picture)) < 0)
{
perror("v4l_set_palette");
return ERR_VIDEO_SPIC;
}
//printf("flag1\n");
//
vd->mmap.width = width;
vd->mmap.height = height;
vd->mmap.format = vd->picture.palette;
vd->frame_current = 0;
vd->frame_using[0] = 0;
vd->frame_using[1] = 0;
// Query the capture buffer layout (mbuf)
if (ioctl(vd->fd, VIDIOCGMBUF, &(vd->mbuf)) < 0)
{
perror("v4l_get_mbuf");
return -1;
}
// Map the capture buffers into memory
vd->map = mmap(0, vd->mbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, vd->fd, 0);
if ( vd->map == MAP_FAILED )
{
perror("v4l_mmap_init:mmap");
return -1;
}
printf("The video device was opened successfully.\n");
return 0;
}
int camera()
{
fb_v41 vd;
int ret,i;
unsigned short *imageptr;
unsigned short tempbuf[240*320];
ret = open_framebuffer(FB_FILE,&vd); // open the framebuffer device (the LCD)
if( 0!= ret ) // failed to open the framebuffer device
{
printf("open framebuffer error\n");
goto err;
}
//printf("flag1\n");
for(i=0;i<240*320;i++) // initialize the buffer to white (0xffff)
tempbuf[i] = 0xffff;
//printf("flag2\n");
rgb_to_framebuffer(&vd,240,320,0,0,tempbuf); // fill the framebuffer: the whole screen turns white
// 240*320 is the image size, (0,0) is the offset on the screen
ret = open_video( V4L_FILE, &vd , // open the camera
12, // pixel depth
15, // palette (15 = VIDEO_PALETTE_YUV420P)
240,320 );
if( 0!= ret ) // failed to open the video device
{
printf("open video error\n");
goto err;
}
while(1)
{
imageptr = (unsigned short *) get_next_frame_address( &vd ); // pointer to the start of the current frame
rgb_to_framebuffer(&vd,vd.mmap.width,vd.mmap.height,
160,120,imageptr); // copy the frame to the framebuffer at offset (160,120); the data could just as well be copied into another buffer instead
if(get_next_frame( &vd ) !=0 )
{ // failed to grab the next frame
goto err;
}
}
err:
if(vd.fbfd)
close(vd.fbfd); // close the framebuffer device
if(vd.fd)
close(vd.fd);
exit(0);
return 0;
}
[Solution]
What output format does your camera actually produce? The framebuffer can only display RGB data. If the camera outputs JPEG or YUV, the frames have to be converted before they are written to the framebuffer.
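For reference, here is a minimal sketch of such a conversion, assuming the capture palette really is VIDEO_PALETTE_YUV420P (the value 15 passed to open_video above) and the LCD framebuffer is 16 bpp RGB565. The helper name yuv420p_to_rgb565, the CLAMP macro and the rgbbuf buffer are only illustrative; the coefficients are the usual integer BT.601 ones.

#define CLAMP(v) ((v) < 0 ? 0 : ((v) > 255 ? 255 : (v)))

/* Convert one YUV420P frame (a width*height Y plane followed by the
 * quarter-size U and V planes) into an RGB565 buffer that
 * rgb_to_framebuffer() can then copy to the LCD. */
void yuv420p_to_rgb565(const unsigned char *src, unsigned short *dst,
                       int width, int height)
{
    const unsigned char *y_plane = src;
    const unsigned char *u_plane = y_plane + width * height;
    const unsigned char *v_plane = u_plane + (width / 2) * (height / 2);
    int x, y;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            int c = y_plane[y * width + x] - 16;
            int u = u_plane[(y / 2) * (width / 2) + (x / 2)] - 128;
            int v = v_plane[(y / 2) * (width / 2) + (x / 2)] - 128;
            /* integer BT.601 YUV -> RGB */
            int r = CLAMP((298 * c + 409 * v + 128) >> 8);
            int g = CLAMP((298 * c - 100 * u - 208 * v + 128) >> 8);
            int b = CLAMP((298 * c + 516 * u + 128) >> 8);
            /* pack 8-bit R, G, B into one RGB565 pixel */
            dst[y * width + x] = (unsigned short)(((r & 0xF8) << 8) |
                                                  ((g & 0xFC) << 3) |
                                                   (b >> 3));
        }
    }
}

The capture loop in camera() would then hand the converted buffer to rgb_to_framebuffer instead of the raw frame, for example:

unsigned short rgbbuf[240*320]; /* hypothetical intermediate buffer */
yuv420p_to_rgb565(get_next_frame_address(&vd), rgbbuf, vd.mmap.width, vd.mmap.height);
rgb_to_framebuffer(&vd, vd.mmap.width, vd.mmap.height, 0, 0, rgbbuf);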