UVC采集卡是啥?
Posted
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了UVC采集卡是啥?相关的知识,希望对你有一定的参考价值。
同三维T510UP就是这种采集卡。免驱,USB2.0 av接口。 参考技术A UVC(USB Video Class) USB视频捕获设备UVC,全称为:USB video class 或USB video device class
UVC是Microsoft与另外几家设备厂商联合推出的为USB视频捕获设备定义的协议标准,目前已成为USB org标准之一。
USB视频捕获设备,比如网络摄像头(USB数字摄像头),USB视频图像采集卡等是支持UVC而且也是数量最多的UVC设备,免驱摄像头也就是UVC标准的摄像头,免驱摄像头是指遵循视频设备类(USB Video Class,简称UVC)标准协议的摄像头产品,由于自Windows XP SP2之后Windows操作系统自带了UVC驱动程序,因此遵循UVC标准的摄像头使用时无需安装额外的驱动程序,故此“免驱”。
USB协议中,除了通用的软硬件电气接口规范等,还包含了各种各样的Class协议,用来为不同的功能定义各自的标准接口和具体的总线上的数据交互格式和内容。这些Class协议的数量非常多,最常见的比如支持U盘功能的Mass Storage Class,以及通用的数据交换协议:CDC class。此外还包括Video、Audio Class, Print Class等等。正是由于这些Class协议,大大促进了USB设备的普及,比如说各个厂商生产的U盘都能通过操作系统自带的驱动程序来使用。
Video Class顾名思义是作为USB接口的视频设备的一个统一的数据交换规范。使用 UVC 的好处 USB 在 Video这块也成为一项标准了之后,硬件在各个程序之间彼此运行会更加顺利,而且也省略了驱动程序安装这一环节,操作系统只要是 Windows XP SP2 之后的版本都可以支持 UVC,Linux系统自2.4以后的内核自带大量设备驱动,其中就包括对UVC设备的支持。当然,目前任何免驱动UVC设备都不能够实现在所有操作系统下的即插即用。
所以很多USB视频捕获设备基本上都是免驱的,能够即插即用PNP。使用UVC技术的包括摄像头、数码相机、类比影像转换器、电视棒及静态影像相机等设备。借助于操作系统的即插即用(PnP)能力,用户可以非常轻松地在PC上安装、配置和添加外设备。
通用即插即用(Universal Plug and Play,UPnP)进一步提升了这种简化性,它将整个网络包括在内,实现了网络设备和服务的发现和控制UPnP不仅仅是对即插即用外设模型的简单扩展。它旨在实现一种“零”配置和“隐性”的联网过程,自动发现和控制来自各家厂商的各种网络设备。
在PC CAM 或是Web cam 中,UVC及UAC已经是标准配备。UVC (USB Video Class) 和 UAC (USB Audio Class) 简单地说,就是即插即用 (Plug & Play) 协定的一种:遵行这两种协定,Device 端就可以直接使用,使用者不需要安装驱动程序。
ubuntu-Linux系统读取USB摄像头数据(uvc)
这几天在做小车的过程中,需要用到图像采集。我想现在用的摄像头是UVC免驱的。根据国嵌的教程中有一个gspca摄像头的程序。我发现把gspca的采集程序用到uvc上时,在显示图像的时候提示没有huffman表。但是在显示gspca的摄像头时却没有问题。为此特别找了以下的程序来获取uvc摄像头的数据。
程序代码:
/* * capturing from UVC cam * requires: libjpeg-dev * build: gcc -std=c99 capture.c -ljpeg -o capture */ #include <stdint.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <errno.h> #include <fcntl.h> #include <sys/ioctl.h> #include <sys/mman.h> #include <asm/types.h> #include <linux/videodev2.h> #include <sys/time.h> #include <sys/types.h> #include <unistd.h> #include <jpeglib.h> void quit(const char * msg) { fprintf(stderr, "[%s] %d: %s\n", msg, errno, strerror(errno)); exit(EXIT_FAILURE); } int xioctl(int fd, int request, void* arg) { for (int i = 0; i < 100; i++) { int r = ioctl(fd, request, arg); if (r != -1 || errno != EINTR) return r; } return -1; } typedef struct { uint8_t* start; size_t length; } buffer_t; typedef struct { int fd; uint32_t width; uint32_t height; size_t buffer_count; buffer_t* buffers; buffer_t head; } camera_t; camera_t* camera_open(const char * device, uint32_t width, uint32_t height) { int fd = open(device, O_RDWR | O_NONBLOCK, 0); if (fd == -1) quit("open"); camera_t* camera = malloc(sizeof (camera_t)); camera->fd = fd; camera->width = width; camera->height = height; camera->buffer_count = 0; camera->buffers = NULL; camera->head.length = 0; camera->head.start = NULL; return camera; } void camera_init(camera_t* camera) { struct v4l2_capability cap; if (xioctl(camera->fd, VIDIOC_QUERYCAP, &cap) == -1) quit("VIDIOC_QUERYCAP"); if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) quit("no capture"); if (!(cap.capabilities & V4L2_CAP_STREAMING)) quit("no streaming"); struct v4l2_cropcap cropcap; memset(&cropcap, 0, sizeof cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (xioctl(camera->fd, VIDIOC_CROPCAP, &cropcap) == 0) { struct v4l2_crop crop; crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; if (xioctl(camera->fd, VIDIOC_S_CROP, &crop) == -1) { // cropping not supported } } struct v4l2_format format; memset(&format, 0, sizeof format); format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 
format.fmt.pix.width = camera->width; format.fmt.pix.height = camera->height; format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; format.fmt.pix.field = V4L2_FIELD_NONE; if (xioctl(camera->fd, VIDIOC_S_FMT, &format) == -1) quit("VIDIOC_S_FMT"); struct v4l2_requestbuffers req; memset(&req, 0, sizeof req); req.count = 4; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if (xioctl(camera->fd, VIDIOC_REQBUFS, &req) == -1) quit("VIDIOC_REQBUFS"); camera->buffer_count = req.count; camera->buffers = calloc(req.count, sizeof (buffer_t)); size_t buf_max = 0; for (size_t i = 0; i < camera->buffer_count; i++) { struct v4l2_buffer buf; memset(&buf, 0, sizeof buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; if (xioctl(camera->fd, VIDIOC_QUERYBUF, &buf) == -1) quit("VIDIOC_QUERYBUF"); if (buf.length > buf_max) buf_max = buf.length; camera->buffers[i].length = buf.length; camera->buffers[i].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset); if (camera->buffers[i].start == MAP_FAILED) quit("mmap"); } camera->head.start = malloc(buf_max); } void camera_start(camera_t* camera) { for (size_t i = 0; i < camera->buffer_count; i++) { struct v4l2_buffer buf; memset(&buf, 0, sizeof buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) quit("VIDIOC_QBUF"); } enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (xioctl(camera->fd, VIDIOC_STREAMON, &type) == -1) quit("VIDIOC_STREAMON"); } void camera_stop(camera_t* camera) { enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) == -1) quit("VIDIOC_STREAMOFF"); } void camera_finish(camera_t* camera) { for (size_t i = 0; i < camera->buffer_count; i++) { munmap(camera->buffers[i].start, camera->buffers[i].length); } free(camera->buffers); camera->buffer_count = 0; camera->buffers = NULL; 
free(camera->head.start); camera->head.length = 0; camera->head.start = NULL; } void camera_close(camera_t* camera) { if (close(camera->fd) == -1) quit("close"); free(camera); } int camera_capture(camera_t* camera) { struct v4l2_buffer buf; memset(&buf, 0, sizeof buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; if (xioctl(camera->fd, VIDIOC_DQBUF, &buf) == -1) return FALSE; memcpy(camera->head.start, camera->buffers[buf.index].start, buf.bytesused); camera->head.length = buf.bytesused; if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) return FALSE; return TRUE; } int camera_frame(camera_t* camera, struct timeval timeout) { fd_set fds; FD_ZERO(&fds); FD_SET(camera->fd, &fds); int r = select(camera->fd + 1, &fds, 0, 0, &timeout); if (r == -1) quit("select"); if (r == 0) return FALSE; return camera_capture(camera); } void jpeg(FILE* dest, uint8_t* rgb, uint32_t width, uint32_t height, int quality) { JSAMPARRAY image; image = calloc(height, sizeof (JSAMPROW)); for (size_t i = 0; i < height; i++) { image[i] = calloc(width * 3, sizeof (JSAMPLE)); for (size_t j = 0; j < width; j++) { image[i][j * 3 + 0] = rgb[(i * width + j) * 3 + 0]; image[i][j * 3 + 1] = rgb[(i * width + j) * 3 + 1]; image[i][j * 3 + 2] = rgb[(i * width + j) * 3 + 2]; } } struct jpeg_compress_struct compress; struct jpeg_error_mgr error; compress.err = jpeg_std_error(&error); jpeg_create_compress(&compress); jpeg_stdio_dest(&compress, dest); compress.image_width = width; compress.image_height = height; compress.input_components = 3; compress.in_color_space = JCS_RGB; jpeg_set_defaults(&compress); jpeg_set_quality(&compress, quality, TRUE); jpeg_start_compress(&compress, TRUE); jpeg_write_scanlines(&compress, image, height); jpeg_finish_compress(&compress); jpeg_destroy_compress(&compress); for (size_t i = 0; i < height; i++) { free(image[i]); } free(image); } int minmax(int min, int v, int max) { return (v < min) ? min : (max < v) ? 
max : v; } uint8_t* yuyv2rgb(uint8_t* yuyv, uint32_t width, uint32_t height) { uint8_t* rgb = calloc(width * height * 3, sizeof (uint8_t)); for (size_t i = 0; i < height; i++) { for (size_t j = 0; j < width; j += 2) { size_t index = i * width + j; int y0 = yuyv[index * 2 + 0] << 8; int u = yuyv[index * 2 + 1] - 128; int y1 = yuyv[index * 2 + 2] << 8; int v = yuyv[index * 2 + 3] - 128; rgb[index * 3 + 0] = minmax(0, (y0 + 359 * v) >> 8, 255); rgb[index * 3 + 1] = minmax(0, (y0 + 88 * v - 183 * u) >> 8, 255); rgb[index * 3 + 2] = minmax(0, (y0 + 454 * u) >> 8, 255); rgb[index * 3 + 3] = minmax(0, (y1 + 359 * v) >> 8, 255); rgb[index * 3 + 4] = minmax(0, (y1 + 88 * v - 183 * u) >> 8, 255); rgb[index * 3 + 5] = minmax(0, (y1 + 454 * u) >> 8, 255); } } return rgb; } int main() { camera_t* camera = camera_open("/dev/video0", 352, 288); camera_init(camera); camera_start(camera); struct timeval timeout; timeout.tv_sec = 1; timeout.tv_usec = 0; /* skip 5 frames for booting a cam */ for (int i = 0; i < 5; i++) { camera_frame(camera, timeout); } camera_frame(camera, timeout); unsigned char* rgb = yuyv2rgb(camera->head.start, camera->width, camera->height); FILE* out = fopen("result.jpg", "w"); jpeg(out, rgb, camera->width, camera->height, 100); fclose(out); free(rgb); camera_stop(camera); camera_finish(camera); camera_close(camera); return 0; }
以上是关于UVC采集卡是啥?的主要内容,如果未能解决你的问题,请参考以下文章
有人用arm开发板用UVC驱动的USB camera采集到640*480的视频过吗?
Android平台GB28181接入端如何对接UVC摄像头?