项目简介
本次编程实战主要是围绕嵌入式 Linux V4L2 采集框架展开,包括以下几个部分:
- v4l2视频采集
- IPU转码
- framebuffer显示
- 自定义UDP简单协议进行传输
- 上位机软件(Qt)
首先是采集部分
#include "includes.h"

/* Capture device state: fd, mmap'ed frame buffers and bookkeeping. */
int fd_cam;
struct cam_buffer *buffers = NULL;
unsigned int n_buffers = 0;
int frameIndex = -1;            /* index of last dequeued buffer; -1 = none (see backFrame) */

/*
 * Open the V4L2 capture device, print its capabilities, enumerate the
 * supported pixel formats, set the crop window and the capture format,
 * then request and map the frame buffers via initBuffers().
 *
 * NOTE(review): the numeric literals in this file were lost in the
 * article extraction; values below are restored from the standard V4L2
 * capture example and the 640x480 RGB24 frame size implied by the UDP
 * sender (1800 blocks x 512 bytes = 640*480*3) — confirm against the
 * original project (github.com/tla001/CapTrans).
 */
void initVideo()
{
    int ret;
    struct v4l2_capability cam_cap;     /* device information */
    struct v4l2_cropcap cam_cropcap;    /* cropping capability */
    struct v4l2_fmtdesc cam_fmtdesc;    /* format enumeration: VIDIOC_ENUM_FMT */
    struct v4l2_crop cam_crop;          /* crop / scale window */
    struct v4l2_format cam_format;      /* video standard and frame format */

    /* Open the device. */
    fd_cam = open(USB_VIDEO, O_RDWR);
    if (fd_cam < 0)
        printf("Can't open video device\n");

    /* VIDIOC_QUERYCAP: basic device information. */
    ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
    if (ret < 0) {
        printf("Can't get device information: VIDIOCGCAP\n");
    }
    /* version is packed as (major << 16) | (minor << 8) | patch */
    printf("Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
           cam_cap.driver, cam_cap.card, cam_cap.bus_info,
           (cam_cap.version >> 16) & 0XFF,
           (cam_cap.version >> 8) & 0XFF,
           cam_cap.version & 0XFF);

    /* VIDIOC_ENUM_FMT: list every pixel format the camera supports. */
    cam_fmtdesc.index = 0;
    cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Support format:\n");
    while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
        printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
        cam_fmtdesc.index++;
    }

    /* VIDIOC_CROPCAP: query the capture cropping capability. */
    cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
        printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
               cam_cropcap.defrect.left, cam_cropcap.defrect.top,
               cam_cropcap.defrect.width, cam_cropcap.defrect.height);
        /* VIDIOC_S_CROP: use the driver's default capture window. */
        cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_crop.c = cam_cropcap.defrect;
        if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
            /* Many USB cameras do not support S_CROP; not fatal. */
            //printf("Can't set crop para\n");
        }
    }
    else {
        printf("Can't set cropcap para\n");
    }

    /* VIDIOC_S_FMT: set the frame size and pixel format. */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    cam_format.fmt.pix.width = 640;
    cam_format.fmt.pix.height = 480;
    cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; /* must match a format the camera supports */
    cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
    if (ret < 0) {
        printf("Can't set frame information\n");
    }

    /* VIDIOC_G_FMT: read back what the driver actually accepted. */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
    if (ret < 0) {
        printf("Can't get frame information\n");
    }
    printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
           cam_format.fmt.pix.width, cam_format.fmt.pix.height);

    ret = initBuffers();
    if (ret < 0) {
        printf("Buffers init error\n");
        //exit(-1);
    }
}

/* Close the capture device. Capture should be stopped and the buffers
 * freed first (stopCapture()/freeBuffers(), left to the caller here). */
void closeVideo()
{
    //stopCapture();
    //freeBuffers();
    close(fd_cam);
}

/*
 * Request driver frame buffers (VIDIOC_REQBUFS, mmap I/O), query each
 * one (VIDIOC_QUERYBUF) and map it into this process.
 * Returns 0 on success, -1 on any failure.
 */
int initBuffers()
{
    int ret;
    /* VIDIOC_REQBUFS: ask for mmap-able capture buffers. */
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4;      /* request 4 buffers; the driver may grant fewer */
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
    if (ret < 0) {
        printf("Request frame buffers failed\n");
        return -1;
    }
    if (req.count < 2) {
        printf("Request frame buffers while insufficient buffer memory\n");
        return -1;
    }
    buffers = calloc(req.count, sizeof(*buffers));
    if (!buffers) {
        printf("Out of memory\n");
        return -1;
    }
    for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        /* VIDIOC_QUERYBUF: get buffer n_buffers' offset and size. */
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
        if (ret < 0) {
            printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
            return -1;
        }
        buffers[n_buffers].length = buf.length;
        /* Map the driver buffer into user space. */
        buffers[n_buffers].start =
            mmap(NULL,                      /* start anywhere */
                 buf.length,
                 PROT_READ | PROT_WRITE,
                 MAP_SHARED,
                 fd_cam, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            printf("mmap buffer%d failed\n", n_buffers);
            return -1;
        }
    }
    return 0;
}
int startCapture()
{
unsigned int i;
//struct v4l2_buffer buf;
for(i=;i<n_buffers;i++){
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory =V4L2_MEMORY_MMAP;
buf.index = i;
// fprintf(stderr, "n_buffers: %d\n", i);
if(- == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
printf("VIDIOC_QBUF buffer%d failed\n",i);
return -;
}
}
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(- == ioctl(fd_cam, VIDIOC_STREAMON, &type)){
printf("VIDIOC_STREAMON error");
return -;
}
return ;
}
int stopCapture()
{
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(- == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)){
printf("VIDIOC_STREAMOFF error\n");
return -;
}
return ;
}
int freeBuffers()
{
unsigned int i;
for(i = ; i < n_buffers; ++i){
if(- == munmap(buffers[i].start, buffers[i].length)){
printf("munmap buffer%d failed\n",i);
return -;
}
}
free(buffers);
return ;
}
int getFrame(void **frame_buf, size_t* len)
{
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
if(- == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)){
printf("VIDIOC_DQBUF error\n");
return -;
}
printf("queue_buf.index=%d\n",queue_buf.index);
//pthread_rwlock_wrlock(&rwlock);
*frame_buf = buffers[queue_buf.index].start;
*len = buffers[queue_buf.index].length;
frameIndex = queue_buf.index;
//pthread_rwlock_unlock(&rwlock);
return ;
}
int backFrame()
{
if(frameIndex != -){
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
queue_buf.index = frameIndex;
if(- == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)){
printf("VIDIOC_QBUF error\n");
return -;
}
return ;
}
return -;
} /*yuv格式转换为rgb格式*/
/*
 * Convert a packed YUYV (YUY2) buffer to RGB24.
 * Every 4 input bytes (Y0 U Y1 V) describe two pixels sharing one U/V
 * pair, producing 6 output bytes. Prints the conversion time in ms.
 *
 * yuv:    input, width*height*2 bytes.
 * rgb:    output, width*height*3 bytes.
 * Returns 0.
 */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb,
                              unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel32;
    int y0, u, y1, v;
    struct timeval starttime, endtime;

    gettimeofday(&starttime, NULL);
    for (in = 0; in < width * height * 2; in += 4) {
        /* Pack the 4-byte macropixel little-endian: V Y1 U Y0.
         * Cast to unsigned before <<24 to avoid shifting into the
         * sign bit of int (undefined behavior in the original). */
        pixel_16 =
            (unsigned int)yuv[in + 3] << 24 |
            (unsigned int)yuv[in + 2] << 16 |
            (unsigned int)yuv[in + 1] << 8 |
            (unsigned int)yuv[in + 0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >> 8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;

        /* First pixel of the pair. */
        pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];

        /* Second pixel, same chroma. */
        pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
    }
    gettimeofday(&endtime, NULL);
    double timeuse = 1000000 * (endtime.tv_sec - starttime.tv_sec)
                   + endtime.tv_usec - starttime.tv_usec;
    timeuse /= 1000;    /* microseconds -> milliseconds */
    printf("yuv2rgb use %f ms\n", timeuse);
    return 0;
}
/*
 * Convert one YUV pixel (u, v centred at 128) to a packed RGB value.
 * Uses the classic floating-point BT.601-style coefficients; each
 * channel is clamped to [0,255] then scaled by 220/256 (the well-known
 * example code's brightness reduction). Byte layout via the pixel32
 * alias is R in byte 0, G in byte 1, B in byte 2 (little-endian host
 * assumed, as in the original).
 */
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int pixel32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel32;
    int r, g, b;

    r = y + (1.370705 * (v - 128));
    g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
    b = y + (1.732446 * (u - 128));

    /* Clamp to the valid 8-bit range. */
    if (r > 255) r = 255;
    if (g > 255) g = 255;
    if (b > 255) b = 255;
    if (r < 0) r = 0;
    if (g < 0) g = 0;
    if (b < 0) b = 0;

    pixel[0] = r * 220 / 256;
    pixel[1] = g * 220 / 256;
    pixel[2] = b * 220 / 256;
    return pixel32;
}
之后是IPU部分
#include "includes.h"

/* IPU state: device fd, conversion task descriptor, timing, and the
 * mmap'ed DMA buffers for the task's input and output. */
int fd_ipu = 0;
struct ipu_task taskCam;
struct timeval begintime, endtime;
unsigned int ipuOutputSize = 0, ipuInputSize = 0;
void *inbuf = NULL;
void *outbuf = NULL;

/* Hardware (IPU) color-space conversion — counterpart of the software decoder. */
/*
 * Configure an IPU task converting 640x480 YUYV to 640x480 BGR24, open
 * the IPU device, allocate physically-contiguous input/output buffers
 * (IPU_ALLOC) and map them into this process.
 *
 * NOTE(review): the literal sizes/format char were lost in extraction;
 * 640x480 and 'BGR3' are restored from the rest of the pipeline
 * (framebuffer loop and the 640*480*3-byte UDP payload) — confirm
 * against the original project.
 */
void initIPU()
{
    int ret;
    CLEAR(taskCam);

    /* Input image size and format (camera frames). */
    taskCam.input.width = 640;
    taskCam.input.height = 480;
    taskCam.input.format = v4l2_fourcc('Y', 'U', 'Y', 'V');
    // taskCam.input.crop.pos.x = 0;
    // taskCam.input.crop.pos.y = 0;
    // taskCam.input.crop.w = 0;
    // taskCam.input.crop.h = 0;

    /* Output image size and format (framebuffer-ready BGR24). */
    taskCam.output.width = 640;
    taskCam.output.height = 480;
    taskCam.output.format = v4l2_fourcc('B', 'G', 'R', '3');
    // taskCam.output.crop.pos.x = 300;
    // taskCam.output.crop.pos.y = 300;
    // taskCam.output.crop.w = 300;
    // taskCam.output.crop.h = 300;

    /* Open the IPU device. */
    fd_ipu = open(IPUDEV, O_RDWR, 0);
    if (fd_ipu < 0) {
        printf("open ipu dev fail\n");
    }

    /* IPU_ALLOC reads the requested size from .paddr and writes the
     * physical address back into it — hence the chained assignment. */
    ipuOutputSize = taskCam.output.paddr =
        taskCam.output.width * taskCam.output.height
        * fmt_to_bpp(taskCam.output.format) / 8;
    printf("ipuOutputSize=%d\n", ipuOutputSize);
    ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.output.paddr);
    if (ret < 0) {
        printf("ioctl IPU_ALLOC fail\n");
    }
    outbuf = mmap(0, ipuOutputSize, PROT_READ | PROT_WRITE,
                  MAP_SHARED, fd_ipu, taskCam.output.paddr);
    if (outbuf == MAP_FAILED) {     /* fix: mmap reports failure as MAP_FAILED, not NULL */
        printf("mmap ipu output image fail\n");
    }

    ipuInputSize = taskCam.input.paddr =
        taskCam.input.width * taskCam.input.height
        * fmt_to_bpp(taskCam.input.format) / 8;
    printf("ipuInputSize=%d\n", ipuInputSize);
    ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.input.paddr);
    if (ret < 0) {
        printf("ioctl IPU_ALLOC fail: (errno = %d)\n", errno);
    }
    inbuf = mmap(0, ipuInputSize, PROT_READ | PROT_WRITE,
                 MAP_SHARED, fd_ipu, taskCam.input.paddr);
    if (inbuf == MAP_FAILED) {      /* fix: same MAP_FAILED check */
        printf("mmap ipu input image fail\n");
    }
}
/*
 * Run one hardware color-space conversion: copy the frame into the
 * IPU input DMA buffer, queue the configured task (IPU_QUEUE_TASK),
 * then copy the converted result back out. Prints the elapsed time.
 *
 * in:  source frame, ipuInputSize bytes.
 * out: destination, ipuOutputSize bytes.
 */
void IPUConvent(void *in, void *out)
{
    int ret;
    memcpy(inbuf, in, ipuInputSize);
    gettimeofday(&begintime, NULL);
    /* Perform the color-space conversion. */
    ret = ioctl(fd_ipu, IPU_QUEUE_TASK, &taskCam);
    if (ret < 0) {
        printf("ioctl IPU_QUEUE_TASK fail %x\n", ret);  /* fix: "ioct" typo */
    }
    gettimeofday(&endtime, NULL);
    double timeuse = 1000000 * (endtime.tv_sec - begintime.tv_sec)
                   + endtime.tv_usec - begintime.tv_usec;
    timeuse /= 1000;    /* microseconds -> milliseconds */
    printf("IPU convert use %f ms\n", timeuse); /* fix: message said "yuv2rgb" (copy-paste) */
    memcpy(out, outbuf, ipuOutputSize);
}
void closeIPU()
{
if(rgbFrame)munmap(rgbFrame, ipuOutputSize);
if(inbuf)munmap(inbuf, ipuInputSize);
if (taskCam.input.paddr)
ioctl(fd_ipu, IPU_FREE, &taskCam.input.paddr);
}
然后是framebuffer显示
#include "includes.h"

/* Framebuffer state: device fd, mapped size/pointer, screen info. */
int fd_fb0;
long int screensize = 0;
char *fb_buf = 0;
struct fb_var_screeninfo vinfo;
struct fb_fix_screeninfo finfo;

/*
 * Open the framebuffer device, read its fixed and variable screen
 * information, and map the framebuffer memory into this process.
 * Exits the process on any failure (classic fbdev example behavior;
 * exit codes restored from that example — confirm against original).
 */
void InitDisOnFrameBuffer()
{
    /* Open the device for reading and writing. */
    fd_fb0 = open(DISON_FB0, O_RDWR);
    if (fd_fb0 < 0) {   /* fix: open() returns -1 on failure, original tested !fd */
        printf("Error: cannot open framebuffer device.\n");
        exit(1);
    }
    printf("The framebuffer device was opened successfully.\n");

    /* Get fixed screen information (line length, etc.). */
    if (ioctl(fd_fb0, FBIOGET_FSCREENINFO, &finfo)) {
        printf("Error reading fixed information.\n");
        exit(2);
    }

    /* Get variable screen information (resolution, depth). */
    if (ioctl(fd_fb0, FBIOGET_VSCREENINFO, &vinfo)) {
        printf("Error reading variable information.\n");
        exit(3);
    }
    printf("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);

    /* Screen size in bytes. */
    screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;
    printf("screensize=%ld\n", screensize);     /* fix: %ld for long int */

    /* Map the device into memory. */
    fb_buf = (char *)mmap(NULL, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
                          fd_fb0, 0);
    if (fb_buf == MAP_FAILED) { /* fix: portable check instead of (int)ptr == -1 */
        printf("Error: failed to map framebuffer device to memory.\n");
        exit(4);
    }
    printf("The framebuffer device was mapped to memory successfully.\n");
}
void DisOnFrameBuffer(unsigned char *frame)
{
//memcpy(fb_buf,frame,640* 480* 3 * sizeof(char));
int x = , y = ;
long int location = ;
// Figure out where in memory to put the pixel
for ( y = ; y < ; y++ )
for ( x = ; x < ; x++ ) {
location = (x+vinfo.xoffset) * (vinfo.bits_per_pixel/) +
(y+vinfo.yoffset) * finfo.line_length;
if ( vinfo.bits_per_pixel == ) {
//rgb32 bgra
*(fb_buf + location ) = *frame;frame++; // Some blue
*(fb_buf + location + ) = *frame;frame++; // A little green
*(fb_buf + location + ) = *frame;frame++; //A lot of red//frame[480*y+x+2];
*(fb_buf + location + ) = ; // No transparency
}
else { //assume 16bpp
int b = ;
int g = (x-)/; // A little green
int r = -(y-)/; // A lot of red
unsigned short int t = r<< | g << | b;
*((unsigned short int*)(fb_buf + location)) = t;
}
}
}
/* Unmap the framebuffer memory and close the framebuffer device
 * opened by InitDisOnFrameBuffer(). */
void CloseDisOnFrameBuffer()
{
munmap(fb_buf, screensize);
close(fd_fb0);
}
UDP部分
#include "includes.h"

/* UDP transport state: server address, socket fd, receive buffer. */
struct sockaddr_in serveraddr;
int confd;
char udpRecbuf[MAXLINE];

/*
 * Create the UDP socket and fill in the server address
 * (SEVER_IP:SERVER_PORT, both defined in the project headers).
 */
void initUDPTrans()
{
    /* 1. create a datagram socket */
    confd = socket(AF_INET, SOCK_DGRAM, 0);
    if (confd < 0)
        printf("UDP socket create failed\n");   /* robustness: report failure */

    /* 2. initialise the server address */
    bzero(&serveraddr, sizeof(serveraddr));
    serveraddr.sin_family = AF_INET;
    inet_pton(AF_INET, SEVER_IP, &serveraddr.sin_addr.s_addr);
    serveraddr.sin_port = htons(SERVER_PORT);
}
/*
 * Send a frame over UDP in BLOCKSIZE-byte blocks, each prefixed with a
 * one-byte sequence flag starting at 'a' (a 640x480 RGB24 frame of
 * 921600 bytes yields the 1800 blocks of 512 bytes the original
 * comment mentions). Generalized to derive the block count from 'size'
 * instead of a hard-coded 1800; trailing bytes beyond a whole block
 * are not sent, as in the original fixed-count scheme.
 *
 * Fix vs original: len was size_t, so the 'len < 0' error check could
 * never fire — sendto() returns ssize_t.
 */
void sendUDPdata(void *datas, unsigned int size)
{
    ssize_t len;
    unsigned int i;
    unsigned int nblocks = size / BLOCKSIZE;
    char tempflag = 'a';
    struct udptransbuf data;

    for (i = 0; i < nblocks; i++) {
        /* cast: void* arithmetic is a GNU extension */
        memcpy(data.buf, (char *)datas + i * BLOCKSIZE, BLOCKSIZE);
        if (i == 0)
            tempflag = 'a';
        else
            tempflag++; /* NOTE(review): wraps past 'a'+127 on long frames — original protocol behavior */
        data.flag = tempflag;

        /* 3. send the block to the server */
        len = sendto(confd, (void *)&data, sizeof(data), 0,
                     (struct sockaddr *)&serveraddr, sizeof(serveraddr));
        if (len < 0)
            printf("UDP send failed\n");
    }
}
void recUDPdata(char *udpRecbuf)//这里要求传入的是数组,要是指针需要修改
{
size_t len;
len=recvfrom(confd,udpRecbuf,sizeof(udpRecbuf),,NULL,);
write(STDIN_FILENO,udpRecbuf,len);
}
/* Close the UDP socket created by initUDPTrans(). */
void closeUDPtrans()
{
close(confd);
}
完整工程
https://github.com/tla001/CapTrans